require ( './sourcemap-register.js' ) ; /******/ ( ( ) => { // webpackBootstrap
/******/ var _ _webpack _modules _ _ = ( {
/***/ 7358 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getDiffFiles = exports . getRenamedFiles = void 0 ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 918 ) ;
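// getRenamedFiles: collects the old/new paths of renamed files (including submodules when present), optionally mapping them to directory names, and returns JSON or a joined string.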
const getRenamedFiles = ( { inputs , workingDirectory , hasSubmodule , shaResult } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
let renamedFiles = yield ( 0 , utils _1 . gitRenamedFiles ) ( {
cwd : workingDirectory ,
sha1 : shaResult . previousSha ,
sha2 : shaResult . currentSha ,
diff : shaResult . diff ,
oldNewSeparator : inputs . oldNewSeparator
} ) ;
if ( hasSubmodule ) {
for ( const submodulePath of yield ( 0 , utils _1 . getSubmodulePath ) ( {
cwd : workingDirectory
} ) ) {
const submoduleShaResult = yield ( 0 , utils _1 . gitSubmoduleDiffSHA ) ( {
cwd : workingDirectory ,
parentSha1 : shaResult . previousSha ,
parentSha2 : shaResult . currentSha ,
submodulePath ,
diff : shaResult . diff
} ) ;
const submoduleWorkingDirectory = path . join ( workingDirectory , submodulePath ) ;
if ( submoduleShaResult . currentSha && submoduleShaResult . previousSha ) {
const submoduleRenamedFiles = yield ( 0 , utils _1 . gitRenamedFiles ) ( {
cwd : submoduleWorkingDirectory ,
sha1 : submoduleShaResult . previousSha ,
sha2 : submoduleShaResult . currentSha ,
diff : shaResult . diff ,
oldNewSeparator : inputs . oldNewSeparator ,
isSubmodule : true
} ) ;
renamedFiles . push ( ... submoduleRenamedFiles ) ;
}
}
}
if ( inputs . dirNames ) {
renamedFiles = renamedFiles . map ( renamedFile => ( 0 , utils _1 . getDirnameMaxDepth ) ( {
pathStr : renamedFile ,
dirNamesMaxDepth : inputs . dirNamesMaxDepth ,
excludeRoot : inputs . dirNamesExcludeRoot
} ) ) ;
}
if ( inputs . json ) {
return ( 0 , utils _1 . jsonOutput ) ( { value : renamedFiles , escape : inputs . escapeJson } ) ;
}
return renamedFiles . join ( inputs . oldNewFilesSeparator ) ;
} ) ;
exports . getRenamedFiles = getRenamedFiles ;
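// getDiffFiles: runs git diff with the given diff filter and file patterns (descending into submodules when present) and returns the matching paths as JSON or a joined string.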
const getDiffFiles = ( { inputs , workingDirectory , hasSubmodule , shaResult , diffFilter , filePatterns = [ ] } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
let files = yield ( 0 , utils _1 . gitDiff ) ( {
cwd : workingDirectory ,
sha1 : shaResult . previousSha ,
sha2 : shaResult . currentSha ,
diff : shaResult . diff ,
diffFilter ,
filePatterns
} ) ;
if ( hasSubmodule ) {
for ( const submodulePath of yield ( 0 , utils _1 . getSubmodulePath ) ( {
cwd : workingDirectory
} ) ) {
const submoduleShaResult = yield ( 0 , utils _1 . gitSubmoduleDiffSHA ) ( {
cwd : workingDirectory ,
parentSha1 : shaResult . previousSha ,
parentSha2 : shaResult . currentSha ,
submodulePath ,
diff : shaResult . diff
} ) ;
const submoduleWorkingDirectory = path . join ( workingDirectory , submodulePath ) ;
if ( submoduleShaResult . currentSha && submoduleShaResult . previousSha ) {
const submoduleFiles = yield ( 0 , utils _1 . gitDiff ) ( {
cwd : submoduleWorkingDirectory ,
sha1 : submoduleShaResult . previousSha ,
sha2 : submoduleShaResult . currentSha ,
diff : shaResult . diff ,
diffFilter ,
isSubmodule : true ,
filePatterns
} ) ;
files . push ( ... submoduleFiles ) ;
}
}
}
if ( inputs . dirNames ) {
files = files . map ( file => ( 0 , utils _1 . getDirnameMaxDepth ) ( {
pathStr : file ,
dirNamesMaxDepth : inputs . dirNamesMaxDepth ,
excludeRoot : inputs . dirNamesExcludeRoot
} ) ) ;
}
if ( inputs . json ) {
2023-05-22 23:09:53 -06:00
return ( 0 , utils _1 . jsonOutput ) ( { value : files , escape : inputs . escapeJson } ) ;
2023-05-22 21:37:12 -06:00
}
return files . join ( inputs . separator ) ;
} ) ;
exports . getDiffFiles = getDiffFiles ;
/***/ } ) ,
/***/ 8613 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getSHAForPullRequestEvent = exports . getSHAForPushEvent = void 0 ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 918 ) ;
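// getCurrentSHA: resolves the current commit SHA, honoring the 'until' input when set, and verifies that the commit exists locally.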
const getCurrentSHA = ( { inputs , workingDirectory } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
let currentSha = '' ;
core . debug ( 'Getting current SHA...' ) ;
if ( inputs . until ) {
core . debug ( ` Getting base SHA for ' ${ inputs . until } '... ` ) ;
try {
currentSha = yield ( 0 , utils _1 . gitLog ) ( {
cwd : workingDirectory ,
args : [
'--format' ,
'"%H"' ,
'-n' ,
'1' ,
'--date' ,
'local' ,
'--until' ,
inputs . until
]
} ) ;
}
catch ( error ) {
core . error ( ` Invalid until date: ${ inputs . until } . ${ error . message } ` ) ;
throw error ;
}
}
else {
if ( ! currentSha ) {
currentSha = yield ( 0 , utils _1 . getHeadSha ) ( { cwd : workingDirectory } ) ;
}
}
yield ( 0 , utils _1 . verifyCommitSha ) ( { sha : currentSha , cwd : workingDirectory } ) ;
core . debug ( ` Current SHA: ${ currentSha } ` ) ;
return currentSha ;
} ) ;
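// getSHAForPushEvent: determines the previous/current SHA pair and diff range for push events, deepening shallow clones and handling tags, forced pushes and initial commits.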
const getSHAForPushEvent = ( inputs , env , workingDirectory , isShallow , hasSubmodule , gitExtraArgs , isTag ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
let targetBranch = env . GITHUB _REFNAME ;
const currentBranch = targetBranch ;
let initialCommit = false ;
let currentSha = inputs . sha ;
let previousSha = inputs . baseSha ;
const diff = '..' ;
if ( isShallow ) {
core . info ( 'Repository is shallow, fetching more history...' ) ;
if ( isTag ) {
const sourceBranch = env . GITHUB _EVENT _BASE _REF . replace ( 'refs/heads/' , '' ) ;
yield ( 0 , utils _1 . gitFetch ) ( {
cwd : workingDirectory ,
args : [
... gitExtraArgs ,
'-u' ,
'--progress' ,
` --deepen= ${ inputs . fetchDepth } ` ,
'origin' ,
` +refs/tags/ ${ sourceBranch } :refs/remotes/origin/ ${ sourceBranch } `
]
} ) ;
}
else {
yield ( 0 , utils _1 . gitFetch ) ( {
cwd : workingDirectory ,
args : [
... gitExtraArgs ,
'-u' ,
'--progress' ,
` --deepen= ${ inputs . fetchDepth } ` ,
'origin' ,
` +refs/heads/ ${ targetBranch } :refs/remotes/origin/ ${ targetBranch } `
]
} ) ;
}
if ( hasSubmodule ) {
yield ( 0 , utils _1 . gitFetchSubmodules ) ( {
cwd : workingDirectory ,
args : [
... gitExtraArgs ,
'-u' ,
'--progress' ,
` --deepen= ${ inputs . fetchDepth } `
]
} ) ;
}
}
if ( previousSha && currentSha && currentBranch && targetBranch ) {
yield ( 0 , utils _1 . verifyCommitSha ) ( { sha : currentSha , cwd : workingDirectory } ) ;
yield ( 0 , utils _1 . verifyCommitSha ) ( { sha : previousSha , cwd : workingDirectory } ) ;
core . info ( ` Previous SHA: ${ previousSha } ` ) ;
core . info ( ` Current SHA: ${ currentSha } ` ) ;
return {
previousSha ,
currentSha ,
currentBranch ,
targetBranch ,
diff
} ;
}
currentSha = yield getCurrentSHA ( { inputs , workingDirectory } ) ;
if ( ! previousSha ) {
core . debug ( 'Getting previous SHA...' ) ;
if ( inputs . since ) {
core . debug ( ` Getting base SHA for ' ${ inputs . since } '... ` ) ;
try {
previousSha = yield ( 0 , utils _1 . gitLog ) ( {
cwd : workingDirectory ,
args : [
'--format' ,
'"%H"' ,
'-n' ,
'1' ,
'--date' ,
'local' ,
'--since' ,
inputs . since
]
} ) ;
}
catch ( error ) {
core . error ( ` Invalid since date: ${ inputs . since } . ${ error . message } ` ) ;
throw error ;
}
}
else if ( isTag ) {
core . debug ( 'Getting previous SHA for tag...' ) ;
const { sha , tag } = yield ( 0 , utils _1 . getPreviousGitTag ) ( { cwd : workingDirectory } ) ;
previousSha = sha ;
targetBranch = tag ;
}
else {
if ( inputs . sinceLastRemoteCommit ) {
core . debug ( 'Getting previous SHA for last remote commit...' ) ;
if ( env . GITHUB _EVENT _FORCED === 'false' || ! env . GITHUB _EVENT _FORCED ) {
previousSha = env . GITHUB _EVENT _BEFORE ;
}
else {
previousSha = yield ( 0 , utils _1 . getParentHeadSha ) ( { cwd : workingDirectory } ) ;
}
}
else {
core . debug ( 'Getting previous SHA for last commit...' ) ;
previousSha = yield ( 0 , utils _1 . getParentHeadSha ) ( { cwd : workingDirectory } ) ;
}
if ( ! previousSha ||
previousSha === '0000000000000000000000000000000000000000' ) {
previousSha = yield ( 0 , utils _1 . getParentHeadSha ) ( { cwd : workingDirectory } ) ;
}
if ( previousSha === currentSha ) {
if ( ! ( yield ( 0 , utils _1 . getParentHeadSha ) ( { cwd : workingDirectory } ) ) ) {
core . warning ( 'Initial commit detected: no previous commit found.' ) ;
initialCommit = true ;
previousSha = currentSha ;
}
else {
previousSha = yield ( 0 , utils _1 . getParentHeadSha ) ( { cwd : workingDirectory } ) ;
}
}
else {
if ( ! previousSha ) {
core . error ( 'Unable to locate a previous commit.' ) ;
throw new Error ( 'Unable to locate a previous commit.' ) ;
}
}
}
}
yield ( 0 , utils _1 . verifyCommitSha ) ( { sha : previousSha , cwd : workingDirectory } ) ;
core . debug ( ` Previous SHA: ${ previousSha } ` ) ;
core . debug ( ` Target branch: ${ targetBranch } ` ) ;
core . debug ( ` Current branch: ${ currentBranch } ` ) ;
if ( ! initialCommit && previousSha === currentSha ) {
core . error ( ` Similar commit hashes detected: previous sha: ${ previousSha } is equivalent to the current sha: ${ currentSha } . ` ) ;
core . error ( ` Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${ inputs . fetchDepth } . ` ) ;
throw new Error ( 'Similar commit hashes detected.' ) ;
}
return {
previousSha ,
currentSha ,
currentBranch ,
targetBranch ,
diff
} ;
} ) ;
exports . getSHAForPushEvent = getSHAForPushEvent ;
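// getSHAForPullRequestEvent: determines the previous/current SHA pair and diff range for pull request events, fetching the pull request head and target branch when the clone is shallow.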
const getSHAForPullRequestEvent = ( inputs , env , workingDirectory , isShallow , hasSubmodule , gitExtraArgs ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
let targetBranch = env . GITHUB _EVENT _PULL _REQUEST _BASE _REF ;
const currentBranch = env . GITHUB _EVENT _PULL _REQUEST _HEAD _REF ;
let currentSha = inputs . sha ;
let previousSha = inputs . baseSha ;
let diff = '...' ;
if ( inputs . sinceLastRemoteCommit ) {
targetBranch = currentBranch ;
}
if ( isShallow ) {
core . info ( 'Repository is shallow, fetching more history...' ) ;
const prFetchExitCode = yield ( 0 , utils _1 . gitFetch ) ( {
cwd : workingDirectory ,
args : [
... gitExtraArgs ,
'-u' ,
'--progress' ,
'origin' ,
` pull/ ${ env . GITHUB _EVENT _PULL _REQUEST _NUMBER } /head: ${ currentBranch } `
]
} ) ;
if ( prFetchExitCode !== 0 ) {
yield ( 0 , utils _1 . gitFetch ) ( {
cwd : workingDirectory ,
args : [
... gitExtraArgs ,
'-u' ,
'--progress' ,
` --deepen= ${ inputs . fetchDepth } ` ,
'origin' ,
` +refs/heads/ ${ currentBranch } *:refs/remotes/origin/ ${ currentBranch } * `
]
} ) ;
}
if ( ! inputs . sinceLastRemoteCommit ) {
core . debug ( 'Fetching target branch...' ) ;
yield ( 0 , utils _1 . gitFetch ) ( {
cwd : workingDirectory ,
args : [
... gitExtraArgs ,
'-u' ,
'--progress' ,
` --deepen= ${ inputs . fetchDepth } ` ,
'origin' ,
` +refs/heads/ ${ targetBranch } :refs/remotes/origin/ ${ targetBranch } `
]
} ) ;
if ( hasSubmodule ) {
yield ( 0 , utils _1 . gitFetchSubmodules ) ( {
cwd : workingDirectory ,
args : [
... gitExtraArgs ,
'-u' ,
'--progress' ,
` --deepen= ${ inputs . fetchDepth } `
]
} ) ;
}
}
}
if ( previousSha && currentSha && currentBranch && targetBranch ) {
yield ( 0 , utils _1 . verifyCommitSha ) ( { sha : currentSha , cwd : workingDirectory } ) ;
yield ( 0 , utils _1 . verifyCommitSha ) ( { sha : previousSha , cwd : workingDirectory } ) ;
core . info ( ` Previous SHA: ${ previousSha } ` ) ;
core . info ( ` Current SHA: ${ currentSha } ` ) ;
return {
previousSha ,
currentSha ,
currentBranch ,
targetBranch ,
diff
} ;
}
currentSha = yield getCurrentSHA ( { inputs , workingDirectory } ) ;
if ( ! env . GITHUB _EVENT _PULL _REQUEST _BASE _REF ||
env . GITHUB _EVENT _HEAD _REPO _FORK === 'true' ) {
diff = '..' ;
}
if ( ! previousSha ) {
if ( inputs . sinceLastRemoteCommit ) {
previousSha = env . GITHUB _EVENT _BEFORE ;
if ( ( yield ( 0 , utils _1 . verifyCommitSha ) ( {
sha : currentSha ,
cwd : workingDirectory ,
showAsErrorMessage : false
} ) ) !== 0 ) {
previousSha = env . GITHUB _EVENT _PULL _REQUEST _BASE _SHA ;
}
}
else {
previousSha = yield ( 0 , utils _1 . getBranchHeadSha ) ( {
cwd : workingDirectory ,
branch : ` origin/ ${ targetBranch } `
} ) ;
if ( isShallow ) {
if ( yield ( 0 , utils _1 . canDiffCommits ) ( {
cwd : workingDirectory ,
sha1 : previousSha ,
sha2 : currentSha ,
diff
} ) ) {
core . debug ( 'Merge base is not in the local history, fetching remote target branch...' ) ;
for ( let i = 1 ; i <= 10 ; i ++ ) {
yield ( 0 , utils _1 . gitFetch ) ( {
cwd : workingDirectory ,
args : [
... gitExtraArgs ,
'-u' ,
'--progress' ,
` --deepen= ${ inputs . fetchDepth } ` ,
'origin' ,
` +refs/heads/ ${ targetBranch } :refs/remotes/origin/ ${ targetBranch } `
]
} ) ;
if ( yield ( 0 , utils _1 . canDiffCommits ) ( {
cwd : workingDirectory ,
sha1 : previousSha ,
sha2 : currentSha ,
diff
} ) ) {
break ;
}
core . debug ( 'Merge base is not in the local history, fetching remote target branch again...' ) ;
core . debug ( ` Attempt ${ i } /10 ` ) ;
}
}
}
}
if ( ! previousSha || previousSha === currentSha ) {
previousSha = env . GITHUB _EVENT _PULL _REQUEST _BASE _SHA ;
}
}
if ( ! ( yield ( 0 , utils _1 . canDiffCommits ) ( {
cwd : workingDirectory ,
sha1 : previousSha ,
sha2 : currentSha ,
diff
} ) ) ) {
diff = '..' ;
}
yield ( 0 , utils _1 . verifyCommitSha ) ( { sha : previousSha , cwd : workingDirectory } ) ;
core . debug ( ` Previous SHA: ${ previousSha } ` ) ;
if ( ! ( yield ( 0 , utils _1 . canDiffCommits ) ( {
cwd : workingDirectory ,
sha1 : previousSha ,
sha2 : currentSha ,
diff
} ) ) ) {
throw new Error ( ` Unable to determine a difference between ${ previousSha } ${ diff } ${ currentSha } ` ) ;
}
return {
previousSha ,
currentSha ,
currentBranch ,
targetBranch ,
diff
} ;
} ) ;
exports . getSHAForPullRequestEvent = getSHAForPullRequestEvent ;
/***/ } ) ,
/***/ 9763 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getEnv = void 0 ;
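// getEnv: collects the GitHub Actions environment variables used by the action, defaulting each to an empty string.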
const getEnv = ( ) => {
return {
GITHUB _EVENT _PULL _REQUEST _HEAD _REF : process . env . GITHUB _EVENT _PULL _REQUEST _HEAD _REF || '' ,
GITHUB _EVENT _PULL _REQUEST _BASE _REF : process . env . GITHUB _EVENT _PULL _REQUEST _BASE _REF || '' ,
GITHUB _EVENT _BEFORE : process . env . GITHUB _EVENT _BEFORE || '' ,
GITHUB _REFNAME : process . env . GITHUB _REFNAME || '' ,
GITHUB _REF : process . env . GITHUB _REF || '' ,
GITHUB _EVENT _BASE _REF : process . env . GITHUB _EVENT _BASE _REF || '' ,
GITHUB _EVENT _HEAD _REPO _FORK : process . env . GITHUB _EVENT _HEAD _REPO _FORK || '' ,
GITHUB _WORKSPACE : process . env . GITHUB _WORKSPACE || '' ,
GITHUB _EVENT _FORCED : process . env . GITHUB _EVENT _FORCED || '' ,
GITHUB _EVENT _PULL _REQUEST _NUMBER : process . env . GITHUB _EVENT _PULL _REQUEST _NUMBER || '' ,
GITHUB _EVENT _PULL _REQUEST _BASE _SHA : process . env . GITHUB _EVENT _PULL _REQUEST _BASE _SHA || ''
} ;
} ;
exports . getEnv = getEnv ;
/***/ } ) ,
/***/ 6180 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getInputs = void 0 ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
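// getInputs: reads and normalizes every action input, parsing fetch_depth and dir_names_max_depth as integers when provided.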
const getInputs = ( ) => {
const files = core . getInput ( 'files' , { required : false } ) ;
const filesSeparator = core . getInput ( 'files_separator' , {
required : false ,
trimWhitespace : false
} ) ;
const filesIgnore = core . getInput ( 'files_ignore' , { required : false } ) ;
const filesIgnoreSeparator = core . getInput ( 'files_ignore_separator' , {
required : false ,
trimWhitespace : false
} ) ;
const filesFromSourceFile = core . getInput ( 'files_from_source_file' , {
required : false
} ) ;
const filesFromSourceFileSeparator = core . getInput ( 'files_from_source_file_separator' , {
required : false ,
trimWhitespace : false
} ) ;
const filesIgnoreFromSourceFile = core . getInput ( 'files_ignore_from_source_file' , { required : false } ) ;
const filesIgnoreFromSourceFileSeparator = core . getInput ( 'files_ignore_from_source_file_separator' , {
required : false ,
trimWhitespace : false
} ) ;
const separator = core . getInput ( 'separator' , {
required : true ,
trimWhitespace : false
} ) ;
const includeAllOldNewRenamedFiles = core . getBooleanInput ( 'include_all_old_new_renamed_files' , { required : false } ) ;
const oldNewSeparator = core . getInput ( 'old_new_separator' , {
required : true ,
trimWhitespace : false
} ) ;
const oldNewFilesSeparator = core . getInput ( 'old_new_files_separator' , {
required : true ,
trimWhitespace : false
} ) ;
const sha = core . getInput ( 'sha' , { required : false } ) ;
const baseSha = core . getInput ( 'base_sha' , { required : false } ) ;
const since = core . getInput ( 'since' , { required : false } ) ;
const until = core . getInput ( 'until' , { required : false } ) ;
const path = core . getInput ( 'path' , { required : false } ) ;
const quotePath = core . getBooleanInput ( 'quotepath' , { required : false } ) ;
const diffRelative = core . getBooleanInput ( 'diff_relative' , { required : false } ) ;
const dirNames = core . getBooleanInput ( 'dir_names' , { required : false } ) ;
const dirNamesMaxDepth = core . getInput ( 'dir_names_max_depth' , {
required : false
} ) ;
const dirNamesExcludeRoot = core . getBooleanInput ( 'dir_names_exclude_root' , {
required : false
} ) ;
const json = core . getBooleanInput ( 'json' , { required : false } ) ;
const escapeJson = core . getBooleanInput ( 'escape_json' , { required : false } ) ;
const fetchDepth = core . getInput ( 'fetch_depth' , { required : false } ) ;
const sinceLastRemoteCommit = core . getBooleanInput ( 'since_last_remote_commit' , { required : false } ) ;
const writeOutputFiles = core . getBooleanInput ( 'write_output_files' , {
required : false
} ) ;
const outputDir = core . getInput ( 'output_dir' , { required : false } ) ;
const matchDirectories = core . getBooleanInput ( 'match_directories' , {
required : false
} ) ;
const inputs = {
files ,
filesSeparator ,
filesFromSourceFile ,
filesFromSourceFileSeparator ,
filesIgnore ,
filesIgnoreSeparator ,
filesIgnoreFromSourceFile ,
filesIgnoreFromSourceFileSeparator ,
separator ,
includeAllOldNewRenamedFiles ,
oldNewSeparator ,
oldNewFilesSeparator ,
sha ,
baseSha ,
since ,
until ,
path ,
quotePath ,
diffRelative ,
dirNames ,
dirNamesExcludeRoot ,
json ,
escapeJson ,
sinceLastRemoteCommit ,
writeOutputFiles ,
outputDir ,
matchDirectories
} ;
if ( fetchDepth ) {
inputs . fetchDepth = parseInt ( fetchDepth , 10 ) ;
}
if ( dirNamesMaxDepth ) {
inputs . dirNamesMaxDepth = parseInt ( dirNamesMaxDepth , 10 ) ;
}
return inputs ;
} ;
exports . getInputs = getInputs ;
/***/ } ) ,
/***/ 3109 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . run = void 0 ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const path _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const changedFiles _1 = _ _nccwpck _require _ _ ( 7358 ) ;
const commitSha _1 = _ _nccwpck _require _ _ ( 8613 ) ;
const env _1 = _ _nccwpck _require _ _ ( 9763 ) ;
const inputs _1 = _ _nccwpck _require _ _ ( 6180 ) ;
const utils _1 = _ _nccwpck _require _ _ ( 918 ) ;
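// run: main entrypoint. Resolves inputs and commit SHAs, computes each category of changed files and sets the corresponding action outputs.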
function run ( ) {
var _a ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const env = ( 0 , env _1 . getEnv ) ( ) ;
core . debug ( ` Env: ${ JSON . stringify ( process . env , null , 2 ) } ` ) ;
const inputs = ( 0 , inputs _1 . getInputs ) ( ) ;
core . debug ( ` Inputs: ${ JSON . stringify ( inputs , null , 2 ) } ` ) ;
yield ( 0 , utils _1 . verifyMinimumGitVersion ) ( ) ;
let quotePathValue = 'on' ;
if ( ! inputs . quotePath ) {
quotePathValue = 'off' ;
}
yield ( 0 , utils _1 . updateGitGlobalConfig ) ( {
name : 'core.quotepath' ,
value : quotePathValue
} ) ;
if ( inputs . diffRelative ) {
yield ( 0 , utils _1 . updateGitGlobalConfig ) ( {
name : 'diff.relative' ,
value : 'true'
} ) ;
}
const workingDirectory = path _1 . default . resolve ( env . GITHUB _WORKSPACE || process . cwd ( ) , inputs . path ) ;
const isShallow = yield ( 0 , utils _1 . isRepoShallow ) ( { cwd : workingDirectory } ) ;
const hasSubmodule = yield ( 0 , utils _1 . submoduleExists ) ( { cwd : workingDirectory } ) ;
let gitExtraArgs = [ '--no-tags' , '--prune' , '--recurse-submodules' ] ;
const isTag = ( _a = env . GITHUB _REF ) === null || _a === void 0 ? void 0 : _a . startsWith ( 'refs/tags/' ) ;
if ( isTag ) {
gitExtraArgs = [ '--prune' , '--no-recurse-submodules' ] ;
}
let shaResult ;
if ( ! env . GITHUB _EVENT _PULL _REQUEST _BASE _REF ) {
core . info ( 'Running on a push event...' ) ;
shaResult = yield ( 0 , commitSha _1 . getSHAForPushEvent ) ( inputs , env , workingDirectory , isShallow , hasSubmodule , gitExtraArgs , isTag ) ;
}
else {
core . info ( 'Running on a pull request event...' ) ;
shaResult = yield ( 0 , commitSha _1 . getSHAForPullRequestEvent ) ( inputs , env , workingDirectory , isShallow , hasSubmodule , gitExtraArgs ) ;
}
const filePatterns = yield ( 0 , utils _1 . getFilePatterns ) ( {
inputs
} ) ;
const addedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'A' ,
filePatterns
} ) ;
core . debug ( ` Added files: ${ addedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'added_files' ,
value : addedFiles ,
inputs
} ) ;
const copiedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'C' ,
filePatterns
} ) ;
core . debug ( ` Copied files: ${ copiedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'copied_files' ,
value : copiedFiles ,
inputs
} ) ;
const modifiedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'M' ,
filePatterns
} ) ;
core . debug ( ` Modified files: ${ modifiedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'modified_files' ,
value : modifiedFiles ,
inputs
} ) ;
const renamedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'R' ,
filePatterns
} ) ;
core . debug ( ` Renamed files: ${ renamedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'renamed_files' ,
value : renamedFiles ,
inputs
} ) ;
const typeChangedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'T' ,
filePatterns
} ) ;
core . debug ( ` Type changed files: ${ typeChangedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'type_changed_files' ,
value : typeChangedFiles ,
inputs
} ) ;
const unmergedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'U' ,
filePatterns
} ) ;
core . debug ( ` Unmerged files: ${ unmergedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'unmerged_files' ,
value : unmergedFiles ,
inputs
} ) ;
const unknownFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'X' ,
filePatterns
} ) ;
core . debug ( ` Unknown files: ${ unknownFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'unknown_files' ,
value : unknownFiles ,
inputs
} ) ;
const allChangedAndModifiedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'ACDMRTUX' ,
filePatterns
} ) ;
core . debug ( ` All changed and modified files: ${ allChangedAndModifiedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'all_changed_and_modified_files' ,
value : allChangedAndModifiedFiles ,
inputs
} ) ;
const allChangedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'ACMR' ,
filePatterns
} ) ;
core . debug ( ` All changed files: ${ allChangedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'all_changed_files' ,
value : allChangedFiles ,
inputs
} ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'any_changed' ,
value : allChangedFiles && filePatterns . length > 0 ,
inputs
} ) ;
const allOtherChangedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'ACMR'
} ) ;
core . debug ( ` All other changed files: ${ allOtherChangedFiles } ` ) ;
const otherChangedFiles = allOtherChangedFiles
. split ( inputs . filesSeparator )
. filter ( filePath => ! allChangedFiles . split ( inputs . filesSeparator ) . includes ( filePath ) ) ;
const onlyChanged = otherChangedFiles . length === 0 && allChangedFiles . length > 0 ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'only_changed' ,
value : onlyChanged ,
inputs
} ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'other_changed_files' ,
value : otherChangedFiles . join ( inputs . filesSeparator ) ,
inputs
} ) ;
const allModifiedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'ACMRD' ,
filePatterns
} ) ;
core . debug ( ` All modified files: ${ allModifiedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'all_modified_files' ,
value : allModifiedFiles ,
inputs
} ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'any_modified' ,
value : allModifiedFiles && filePatterns . length > 0 ,
inputs
} ) ;
const allOtherModifiedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'ACMRD'
} ) ;
const otherModifiedFiles = allOtherModifiedFiles
. split ( inputs . filesSeparator )
. filter ( filePath => ! allModifiedFiles . split ( inputs . filesSeparator ) . includes ( filePath ) ) ;
const onlyModified = otherModifiedFiles . length === 0 && allModifiedFiles . length > 0 ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'only_modified' ,
value : onlyModified ,
inputs
} ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'other_modified_files' ,
value : otherModifiedFiles . join ( inputs . filesSeparator ) ,
inputs
} ) ;
const deletedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'D' ,
filePatterns
} ) ;
core . debug ( ` Deleted files: ${ deletedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'deleted_files' ,
value : deletedFiles ,
inputs
} ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'any_deleted' ,
value : deletedFiles && filePatterns . length > 0 ,
inputs
} ) ;
const allOtherDeletedFiles = yield ( 0 , changedFiles _1 . getDiffFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult ,
diffFilter : 'D'
} ) ;
const otherDeletedFiles = allOtherDeletedFiles
. split ( inputs . filesSeparator )
. filter ( filePath => ! deletedFiles . split ( inputs . filesSeparator ) . includes ( filePath ) ) ;
const onlyDeleted = otherDeletedFiles . length === 0 && deletedFiles . length > 0 ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'only_deleted' ,
value : onlyDeleted ,
inputs
} ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'other_deleted_files' ,
value : otherDeletedFiles . join ( inputs . filesSeparator ) ,
inputs
} ) ;
if ( inputs . includeAllOldNewRenamedFiles ) {
const allOldNewRenamedFiles = yield ( 0 , changedFiles _1 . getRenamedFiles ) ( {
inputs ,
workingDirectory ,
hasSubmodule ,
shaResult
} ) ;
core . debug ( ` All old new renamed files: ${ allOldNewRenamedFiles } ` ) ;
yield ( 0 , utils _1 . setOutput ) ( {
key : 'all_old_new_renamed_files' ,
value : allOldNewRenamedFiles ,
inputs
} ) ;
}
} ) ;
}
exports . run = run ;
/* istanbul ignore if */
if ( ! process . env . TESTING ) {
// eslint-disable-next-line github/no-then
run ( ) . catch ( e => {
core . setFailed ( e . message || e ) ;
} ) ;
}
/***/ } ) ,
/***/ 918 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _asyncValues = ( this && this . _ _asyncValues ) || function ( o ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var m = o [ Symbol . asyncIterator ] , i ;
return m ? m . call ( o ) : ( o = typeof _ _values === "function" ? _ _values ( o ) : o [ Symbol . iterator ] ( ) , i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ) ;
function verb ( n ) { i [ n ] = o [ n ] && function ( v ) { return new Promise ( function ( resolve , reject ) { v = o [ n ] ( v ) , settle ( resolve , reject , v . done , v . value ) ; } ) ; } ; }
function settle ( resolve , reject , d , v ) { Promise . resolve ( v ) . then ( function ( v ) { resolve ( { value : v , done : d } ) ; } , reject ) ; }
} ;
var _ _await = ( this && this . _ _await ) || function ( v ) { return this instanceof _ _await ? ( this . v = v , this ) : new _ _await ( v ) ; }
var _ _asyncGenerator = ( this && this . _ _asyncGenerator ) || function ( thisArg , _arguments , generator ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var g = generator . apply ( thisArg , _arguments || [ ] ) , i , q = [ ] ;
return i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ;
function verb ( n ) { if ( g [ n ] ) i [ n ] = function ( v ) { return new Promise ( function ( a , b ) { q . push ( [ n , v , a , b ] ) > 1 || resume ( n , v ) ; } ) ; } ; }
function resume ( n , v ) { try { step ( g [ n ] ( v ) ) ; } catch ( e ) { settle ( q [ 0 ] [ 3 ] , e ) ; } }
function step ( r ) { r . value instanceof _ _await ? Promise . resolve ( r . value . v ) . then ( fulfill , reject ) : settle ( q [ 0 ] [ 2 ] , r ) ; }
function fulfill ( value ) { resume ( "next" , value ) ; }
function reject ( value ) { resume ( "throw" , value ) ; }
function settle ( f , v ) { if ( f ( v ) , q . shift ( ) , q . length ) resume ( q [ 0 ] [ 0 ] , q [ 0 ] [ 1 ] ) ; }
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . setOutput = exports . getFilePatterns = exports . jsonOutput = exports . getDirnameMaxDepth = exports . canDiffCommits = exports . getPreviousGitTag = exports . verifyCommitSha = exports . getBranchHeadSha = exports . getParentHeadSha = exports . getHeadSha = exports . gitLog = exports . gitDiff = exports . gitRenamedFiles = exports . gitSubmoduleDiffSHA = exports . getSubmodulePath = exports . gitFetchSubmodules = exports . gitFetch = exports . submoduleExists = exports . isRepoShallow = exports . updateGitGlobalConfig = exports . getFilesFromSourceFile = exports . getPatterns = exports . exists = exports . verifyMinimumGitVersion = void 0 ;
/*global AsyncIterableIterator*/
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const exec = _ _importStar ( _ _nccwpck _require _ _ ( 1514 ) ) ;
const internal _match _kind _1 = _ _nccwpck _require _ _ ( 1063 ) ;
const internal _path _helper _1 = _ _nccwpck _require _ _ ( 1849 ) ;
const internal _pattern _1 = _ _nccwpck _require _ _ ( 4536 ) ;
const patternHelper = _ _importStar ( _ _nccwpck _require _ _ ( 9005 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const fs _1 = _ _nccwpck _require _ _ ( 7147 ) ;
const readline _1 = _ _nccwpck _require _ _ ( 4521 ) ;
const MINIMUM _GIT _VERSION = '2.18.0' ;
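// versionToNumber: converts a 'major.minor.patch' version string into a single comparable number.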
const versionToNumber = ( version ) => {
const [ major , minor , patch ] = version . split ( '.' ) . map ( Number ) ;
return major * 1000000 + minor * 1000 + patch ;
} ;
const verifyMinimumGitVersion = ( ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ '--version' ] , { silent : true } ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || 'An unexpected error occurred' ) ;
}
const gitVersion = stdout . trim ( ) ;
if ( versionToNumber ( gitVersion ) < versionToNumber ( MINIMUM _GIT _VERSION ) ) {
throw new Error ( ` Minimum required git version is ${ MINIMUM _GIT _VERSION } , your version is ${ gitVersion } ` ) ;
}
} ) ;
exports . verifyMinimumGitVersion = verifyMinimumGitVersion ;
function exists ( filePath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield fs _1 . promises . access ( filePath ) ;
return true ;
}
catch ( _a ) {
return false ;
}
} ) ;
}
exports . exists = exists ;
function getPatterns ( filePatterns ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const IS _WINDOWS = process . platform === 'win32' ;
const patterns = [ ] ;
if ( IS _WINDOWS ) {
filePatterns = filePatterns . replace ( /\r\n/g , '\n' ) ;
filePatterns = filePatterns . replace ( /\r/g , '\n' ) ;
}
const lines = filePatterns . split ( '\n' ) . map ( filePattern => filePattern . trim ( ) ) ;
for ( let line of lines ) {
// Empty or comment
if ( ! ( ! line || line . startsWith ( '#' ) ) ) {
line = IS _WINDOWS ? line . replace ( /\\/g , '/' ) : line ;
const pattern = new internal _pattern _1 . Pattern ( line ) ;
// @ts-ignore
pattern . minimatch . options . nobrace = false ;
// @ts-ignore
pattern . minimatch . make ( ) ;
patterns . push ( pattern ) ;
if ( pattern . trailingSeparator ||
pattern . segments [ pattern . segments . length - 1 ] !== '**' ) {
patterns . push ( new internal _pattern _1 . Pattern ( pattern . negate , true , pattern . segments . concat ( '**' ) ) ) ;
}
}
}
return patterns ;
} ) ;
}
exports . getPatterns = getPatterns ;
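// lineOfFileGenerator: async generator that yields non-empty, non-comment lines from a source file, negating each pattern when reading an ignore file.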
function lineOfFileGenerator ( { filePath , excludedFiles } ) {
return _ _asyncGenerator ( this , arguments , function * lineOfFileGenerator _1 ( ) {
var _a , e _1 , _b , _c ;
const fileStream = ( 0 , fs _1 . createReadStream ) ( filePath ) ;
/* istanbul ignore next */
fileStream . on ( 'error' , error => {
throw error ;
} ) ;
const rl = ( 0 , readline _1 . createInterface ) ( {
input : fileStream ,
crlfDelay : Infinity
} ) ;
try {
for ( var _d = true , rl _1 = _ _asyncValues ( rl ) , rl _1 _1 ; rl _1 _1 = yield _ _await ( rl _1 . next ( ) ) , _a = rl _1 _1 . done , ! _a ; ) {
_c = rl _1 _1 . value ;
_d = false ;
try {
const line = _c ;
if ( ! line . startsWith ( '#' ) && line !== '' ) {
if ( excludedFiles ) {
if ( line . startsWith ( '!' ) ) {
yield yield _ _await ( line ) ;
}
else {
yield yield _ _await ( ` ! ${ line } ` ) ;
}
}
else {
yield yield _ _await ( line ) ;
}
}
}
finally {
_d = true ;
}
}
}
catch ( e _1 _1 ) { e _1 = { error : e _1 _1 } ; }
finally {
try {
if ( ! _d && ! _a && ( _b = rl _1 . return ) ) yield _ _await ( _b . call ( rl _1 ) ) ;
}
finally { if ( e _1 ) throw e _1 . error ; }
}
} ) ;
}
function getFilesFromSourceFile ( { filePaths , excludedFiles = false } ) {
var _a , e _2 , _b , _c ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const lines = [ ] ;
for ( const filePath of filePaths ) {
try {
for ( var _d = true , _e = ( e _2 = void 0 , _ _asyncValues ( lineOfFileGenerator ( { filePath , excludedFiles } ) ) ) , _f ; _f = yield _e . next ( ) , _a = _f . done , ! _a ; ) {
_c = _f . value ;
_d = false ;
try {
const line = _c ;
lines . push ( line ) ;
}
finally {
_d = true ;
}
}
}
catch ( e _2 _1 ) { e _2 = { error : e _2 _1 } ; }
finally {
try {
if ( ! _d && ! _a && ( _b = _e . return ) ) yield _b . call ( _e ) ;
}
finally { if ( e _2 ) throw e _2 . error ; }
}
}
return lines ;
} ) ;
}
exports . getFilesFromSourceFile = getFilesFromSourceFile ;
const updateGitGlobalConfig = ( { name , value } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stderr } = yield exec . getExecOutput ( 'git' , [ 'config' , '--global' , name , value ] , {
ignoreReturnCode : true ,
silent : true
} ) ;
/* istanbul ignore if */
if ( exitCode !== 0 || stderr ) {
core . warning ( stderr || ` Couldn't update git global config ${ name } ` ) ;
}
} ) ;
exports . updateGitGlobalConfig = updateGitGlobalConfig ;
const isRepoShallow = ( { cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'rev-parse' , '--is-shallow-repository' ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || 'An unexpected error occurred' ) ;
}
return stdout . trim ( ) === 'true' ;
} ) ;
exports . isRepoShallow = isRepoShallow ;
const submoduleExists = ( { cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'submodule' , 'status' ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || 'An unexpected error occurred' ) ;
}
return stdout . trim ( ) !== '' ;
} ) ;
exports . submoduleExists = submoduleExists ;
const gitFetch = ( { args , cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stderr } = yield exec . getExecOutput ( 'git' , [ 'fetch' , ... args ] , {
cwd ,
silent : true
} ) ;
/* istanbul ignore if */
if ( exitCode !== 0 || stderr ) {
core . warning ( stderr || "Couldn't fetch repository" ) ;
}
return exitCode ;
} ) ;
exports . gitFetch = gitFetch ;
const gitFetchSubmodules = ( { args , cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stderr } = yield exec . getExecOutput ( 'git' , [ 'submodule' , 'foreach' , 'git' , 'fetch' , ... args ] , {
cwd ,
silent : true
} ) ;
/* istanbul ignore if */
if ( exitCode !== 0 || stderr ) {
core . warning ( stderr || "Couldn't fetch submodules" ) ;
}
} ) ;
exports . gitFetchSubmodules = gitFetchSubmodules ;
const getSubmodulePath = ( { cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'submodule' , 'status' ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
core . warning ( stderr || "Couldn't get submodule names" ) ;
return [ ] ;
}
return stdout
. split ( '\n' )
. filter ( Boolean )
. map ( line => line . split ( ' ' ) [ 1 ] ) ;
} ) ;
exports . getSubmodulePath = getSubmodulePath ;
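// gitSubmoduleDiffSHA: extracts the previous and current submodule commit SHAs from the parent repository's diff for the given submodule path.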
const gitSubmoduleDiffSHA = ( { cwd , parentSha1 , parentSha2 , submodulePath , diff } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
var _a , _b , _c , _d ;
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'diff' , parentSha1 , parentSha2 , '--' , submodulePath ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || 'An unexpected error occurred' ) ;
}
const subprojectCommitPreRegex = /^(?<preCommit>-)Subproject commit (?<commitHash>.+)$/m ;
const subprojectCommitCurRegex = /^(?<curCommit>\+)Subproject commit (?<commitHash>.+)$/m ;
const previousSha = ( ( _b = ( _a = subprojectCommitPreRegex . exec ( stdout ) ) === null || _a === void 0 ? void 0 : _a . groups ) === null || _b === void 0 ? void 0 : _b . commitHash ) ||
'4b825dc642cb6eb9a060e54bf8d69288fbee4904' ;
const currentSha = ( _d = ( _c = subprojectCommitCurRegex . exec ( stdout ) ) === null || _c === void 0 ? void 0 : _c . groups ) === null || _d === void 0 ? void 0 : _d . commitHash ;
if ( currentSha ) {
return { previousSha , currentSha } ;
}
core . debug ( ` No submodule commit found for ${ submodulePath } between ${ parentSha1 } ${ diff } ${ parentSha2 } ` ) ;
return { } ;
} ) ;
exports . gitSubmoduleDiffSHA = gitSubmoduleDiffSHA ;
const gitRenamedFiles = ( { cwd , sha1 , sha2 , diff , oldNewSeparator , isSubmodule = false } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stderr , stdout } = yield exec . getExecOutput ( 'git' , [
'diff' ,
'--name-status' ,
'--ignore-submodules=all' ,
'--diff-filter=R' ,
` ${ sha1 } ${ diff } ${ sha2 } `
] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
if ( isSubmodule ) {
core . warning ( stderr ||
` Failed to get renamed files for submodule between: ${ sha1 } ${ diff } ${ sha2 } ` ) ;
core . warning ( 'Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage' ) ;
}
else {
core . error ( stderr || ` Failed to get renamed files between: ${ sha1 } ${ diff } ${ sha2 } ` ) ;
throw new Error ( 'Unable to get renamed files' ) ;
}
return [ ] ;
}
return stdout
. trim ( )
. split ( '\n' )
. map ( line => {
const [ , oldPath , newPath ] = line . split ( '\t' ) ;
return ` ${ oldPath } ${ oldNewSeparator } ${ newPath } ` ;
} ) ;
} ) ;
exports . gitRenamedFiles = gitRenamedFiles ;
const gitDiff = ( { cwd , sha1 , sha2 , diff , diffFilter , filePatterns = [ ] , isSubmodule = false } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [
'diff' ,
'--name-only' ,
'--ignore-submodules=all' ,
` --diff-filter= ${ diffFilter } ` ,
` ${ sha1 } ${ diff } ${ sha2 } `
] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
if ( isSubmodule ) {
core . warning ( stderr ||
` Failed to get changed files for submodule between: ${ sha1 } ${ diff } ${ sha2 } ` ) ;
core . warning ( 'Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage' ) ;
}
else {
core . error ( stderr || ` Failed to get changed files between: ${ sha1 } ${ diff } ${ sha2 } ` ) ;
throw new Error ( 'Unable to get changed files' ) ;
}
return [ ] ;
}
return stdout . split ( '\n' ) . filter ( filePath => {
if ( filePatterns . length === 0 ) {
return filePath !== '' ;
}
const match = patternHelper . match ( filePatterns , filePath ) ;
return filePath !== '' && match === internal _match _kind _1 . MatchKind . All ;
} ) ;
} ) ;
exports . gitDiff = gitDiff ;
const gitLog = ( { args , cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'log' , ... args ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || 'An unexpected error occurred' ) ;
}
return stdout . trim ( ) ;
} ) ;
exports . gitLog = gitLog ;
const getHeadSha = ( { cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'rev-parse' , 'HEAD' ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || 'Unable to get HEAD sha' ) ;
}
return stdout . trim ( ) ;
} ) ;
exports . getHeadSha = getHeadSha ;
const getParentHeadSha = ( { cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'rev-parse' , 'HEAD^' ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || 'Unable to get HEAD^ sha' ) ;
}
return stdout . trim ( ) ;
} ) ;
exports . getParentHeadSha = getParentHeadSha ;
const getBranchHeadSha = ( { branch , cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'rev-parse' , branch ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || ` Unable to get ${ branch } head sha ` ) ;
}
return stdout . trim ( ) ;
} ) ;
exports . getBranchHeadSha = getBranchHeadSha ;
const verifyCommitSha = ( { sha , cwd , showAsErrorMessage = true } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stderr } = yield exec . getExecOutput ( 'git' , [ 'rev-parse' , '--quiet' , '--verify' , ` ${ sha } ^{commit} ` ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
if ( showAsErrorMessage ) {
core . error ( ` Unable to locate the commit sha: ${ sha } ` ) ;
core . error ( "Please verify that the commit sha is correct, and increase the 'fetch_depth' input if needed" ) ;
core . debug ( stderr ) ;
}
else {
core . warning ( ` Unable to locate the commit sha: ${ sha } ` ) ;
core . debug ( stderr ) ;
}
}
return exitCode ;
} ) ;
exports . verifyCommitSha = verifyCommitSha ;
const getPreviousGitTag = ( { cwd } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stdout , stderr } = yield exec . getExecOutput ( 'git' , [ 'tag' , '--sort=-version:refname' ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
throw new Error ( stderr || 'Unable to get previous tag' ) ;
}
const tags = stdout . trim ( ) . split ( '\n' ) ;
if ( tags . length < 2 ) {
core . warning ( 'No previous tag found' ) ;
return { tag : '' , sha : '' } ;
}
const previousTag = tags [ 1 ] ;
const { exitCode : exitCode2 , stdout : stdout2 , stderr : stderr2 } = yield exec . getExecOutput ( 'git' , [ 'rev-parse' , previousTag ] , {
cwd ,
silent : true
} ) ;
if ( exitCode2 !== 0 ) {
throw new Error ( stderr2 || 'Unable to get previous tag' ) ;
}
const sha = stdout2 . trim ( ) ;
return { tag : previousTag , sha } ;
} ) ;
exports . getPreviousGitTag = getPreviousGitTag ;
const canDiffCommits = ( { cwd , sha1 , sha2 , diff } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const { exitCode , stderr } = yield exec . getExecOutput ( 'git' , [ 'diff' , '--name-only' , '--ignore-submodules=all' , ` ${ sha1 } ${ diff } ${ sha2 } ` ] , {
cwd ,
silent : true
} ) ;
if ( exitCode !== 0 ) {
core . warning ( stderr || ` Unable to find merge base between ${ sha1 } and ${ sha2 } ` ) ;
return false ;
}
return true ;
} ) ;
exports . canDiffCommits = canDiffCommits ;
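// getDirnameMaxDepth: returns the directory name of a path truncated to at most dirNamesMaxDepth segments, optionally excluding the root ('.').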
const getDirnameMaxDepth = ( { pathStr , dirNamesMaxDepth , excludeRoot } ) => {
const pathArr = ( 0 , internal _path _helper _1 . dirname ) ( pathStr ) . split ( path . sep ) ;
const maxDepth = Math . min ( dirNamesMaxDepth || pathArr . length , pathArr . length ) ;
let output = pathArr [ 0 ] ;
for ( let i = 1 ; i < maxDepth ; i ++ ) {
output = path . join ( output , pathArr [ i ] ) ;
}
if ( excludeRoot && output === '.' ) {
return '' ;
}
return output ;
} ;
exports . getDirnameMaxDepth = getDirnameMaxDepth ;
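// Illustrative example, assuming a posix path separator and a hypothetical input:
// getDirnameMaxDepth({ pathStr: 'src/app/components/Button.tsx', dirNamesMaxDepth: 2 })
// takes the dirname 'src/app/components', keeps at most 2 segments and returns 'src/app'.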
const jsonOutput = ( { value , escape } ) => {
let result = JSON . stringify ( value ) ;
return escape ? result . replace ( /"/g , '\\"' ) : result ;
} ;
exports . jsonOutput = jsonOutput ;
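// Illustrative example, with hypothetical values: jsonOutput({ value: ['file a.txt', 'b.txt'], escape: true })
// returns [\"file a.txt\",\"b.txt\"] (the stringified array with each double quote escaped),
// whereas escape: false returns ["file a.txt","b.txt"] unchanged.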
const getFilePatterns = ( { inputs } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
let filesPatterns = inputs . files
. split ( inputs . filesSeparator )
. filter ( p => p !== '' )
. join ( '\n' ) ;
core . debug ( ` files patterns: ${ filesPatterns } ` ) ;
if ( inputs . filesFromSourceFile !== '' ) {
const inputFilesFromSourceFile = inputs . filesFromSourceFile
. split ( inputs . filesFromSourceFileSeparator )
. filter ( p => p !== '' ) ;
const filesFromSourceFiles = ( yield getFilesFromSourceFile ( { filePaths : inputFilesFromSourceFile } ) ) . join ( '\n' ) ;
core . debug ( ` files from source files patterns: ${ filesFromSourceFiles } ` ) ;
filesPatterns = filesPatterns . concat ( '\n' , filesFromSourceFiles ) ;
}
if ( inputs . filesIgnore ) {
const filesIgnorePatterns = inputs . filesIgnore
. split ( inputs . filesIgnoreSeparator )
. filter ( p => p !== '' )
. map ( p => {
if ( ! p . startsWith ( '!' ) ) {
p = ` ! ${ p } ` ;
}
return p ;
} )
. join ( '\n' ) ;
core . debug ( ` files ignore patterns: ${ filesIgnorePatterns } ` ) ;
filesPatterns = filesPatterns . concat ( '\n' , filesIgnorePatterns ) ;
}
if ( inputs . filesIgnoreFromSourceFile ) {
const inputFilesIgnoreFromSourceFile = inputs . filesIgnoreFromSourceFile
. split ( inputs . filesIgnoreFromSourceFileSeparator )
. filter ( p => p !== '' ) ;
const filesIgnoreFromSourceFiles = ( yield getFilesFromSourceFile ( {
filePaths : inputFilesIgnoreFromSourceFile ,
excludedFiles : true
} ) ) . join ( '\n' ) ;
core . debug ( ` files ignore from source files patterns: ${ filesIgnoreFromSourceFiles } ` ) ;
filesPatterns = filesPatterns . concat ( '\n' , filesIgnoreFromSourceFiles ) ;
}
const patterns = yield getPatterns ( filesPatterns ) ;
core . debug ( ` patterns: ${ patterns } ` ) ;
return patterns ;
} ) ;
exports . getFilePatterns = getFilePatterns ;
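// Illustrative example, with hypothetical inputs: files = 'src/**' and filesIgnore = 'dist/**'
// combine into the pattern list "src/**\n!dist/**", since every ignore pattern is prefixed
// with '!' before the result is handed to getPatterns.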
const setOutput = ( { key , value , inputs } ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
const cleanedValue = value . toString ( ) . trim ( ) ;
if ( inputs . writeOutputFiles ) {
const outputDir = inputs . outputDir || '.github/outputs' ;
const extension = inputs . json ? 'json' : 'txt' ;
const outputFilePath = path . join ( outputDir , ` ${ key } . ${ extension } ` ) ;
if ( ! ( yield exists ( outputDir ) ) ) {
yield fs _1 . promises . mkdir ( outputDir , { recursive : true } ) ;
}
yield fs _1 . promises . writeFile ( outputFilePath , cleanedValue ) ;
}
else {
core . setOutput ( key , cleanedValue ) ;
}
} ) ;
exports . setOutput = setOutput ;
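// Illustrative example, with a hypothetical key: setOutput({ key: 'all_changed_files', value: 'a.txt b.txt', inputs })
// writes '.github/outputs/all_changed_files.txt' when inputs.writeOutputFiles is true
// (using a '.json' extension when inputs.json is set), and otherwise falls back to core.setOutput.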
/***/ } ) ,
/***/ 7351 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . issue = exports . issueCommand = void 0 ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
/ * *
* Commands
*
* Command Format :
* ::name key=value,key=value::message
*
* Examples :
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
* /
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
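// Illustrative example, with hypothetical arguments:
// issueCommand('warning', { file: 'app.js', line: '10' }, 'Something failed')
// writes "::warning file=app.js,line=10::Something failed" plus os.EOL to stdout,
// with %, \r, \n (and, for property values, : and ,) percent-escaped by the helpers below.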
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
}
this . command = command ;
this . properties = properties ;
this . message = message ;
}
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
let first = true ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
if ( first ) {
first = false ;
}
else {
cmdStr += ',' ;
}
cmdStr += ` ${ key } = ${ escapeProperty ( val ) } ` ;
}
}
}
}
cmdStr += ` ${ CMD _STRING } ${ escapeData ( this . message ) } ` ;
return cmdStr ;
}
}
function escapeData ( s ) {
return utils _1 . toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' ) ;
}
function escapeProperty ( s ) {
return utils _1 . toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /:/g , '%3A' )
. replace ( /,/g , '%2C' ) ;
}
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 2186 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getIDToken = exports . getState = exports . saveState = exports . group = exports . endGroup = exports . startGroup = exports . info = exports . notice = exports . warning = exports . error = exports . debug = exports . isDebug = exports . setFailed = exports . setCommandEcho = exports . setOutput = exports . getBooleanInput = exports . getMultilineInput = exports . getInput = exports . addPath = exports . setSecret = exports . exportVariable = exports . ExitCode = void 0 ;
const command _1 = _ _nccwpck _require _ _ ( 7351 ) ;
const file _command _1 = _ _nccwpck _require _ _ ( 717 ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const oidc _utils _1 = _ _nccwpck _require _ _ ( 8041 ) ;
/ * *
* The code to exit an action
* /
var ExitCode ;
( function ( ExitCode ) {
/ * *
* A code indicating that the action was successful
* /
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/ * *
* A code indicating that the action was a failure
* /
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/ * *
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
* @ param val the value of the variable . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable ( name , val ) {
const convertedVal = utils _1 . toCommandValue ( val ) ;
process . env [ name ] = convertedVal ;
const filePath = process . env [ 'GITHUB_ENV' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'ENV' , file _command _1 . prepareKeyValueMessage ( name , val ) ) ;
}
command _1 . issueCommand ( 'set-env' , { name } , convertedVal ) ;
}
exports . exportVariable = exportVariable ;
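// Illustrative example, with a hypothetical variable: exportVariable('MY_VAR', 'value')
// sets process.env.MY_VAR and, when GITHUB_ENV points at a file, appends a
// "MY_VAR<<ghadelimiter_<uuid>" block via the ENV file command; without GITHUB_ENV it
// falls back to the legacy "::set-env name=MY_VAR::value" workflow command.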
/ * *
* Registers a secret which will get masked from logs
* @ param secret value of the secret
* /
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
}
exports . setSecret = setSecret ;
/ * *
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
* /
function addPath ( inputPath ) {
const filePath = process . env [ 'GITHUB_PATH' ] || '' ;
if ( filePath ) {
file _command _1 . issueFileCommand ( 'PATH' , inputPath ) ;
}
else {
command _1 . issueCommand ( 'add-path' , { } , inputPath ) ;
}
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
}
exports . addPath = addPath ;
/ * *
* Gets the value of an input .
* Unless trimWhitespace is set to false in InputOptions , the value is also trimmed .
* Returns an empty string if the value is not defined .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
* /
function getInput ( name , options ) {
const val = process . env [ ` INPUT_ ${ name . replace ( / /g , '_' ) . toUpperCase ( ) } ` ] || '' ;
if ( options && options . required && ! val ) {
throw new Error ( ` Input required and not supplied: ${ name } ` ) ;
}
if ( options && options . trimWhitespace === false ) {
return val ;
}
return val . trim ( ) ;
}
exports . getInput = getInput ;
/ * *
* Gets the values of a multiline input . Each value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string [ ]
*
* /
function getMultilineInput ( name , options ) {
const inputs = getInput ( name , options )
. split ( '\n' )
. filter ( x => x !== '' ) ;
if ( options && options . trimWhitespace === false ) {
return inputs ;
}
return inputs . map ( input => input . trim ( ) ) ;
}
exports . getMultilineInput = getMultilineInput ;
/ * *
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification .
* Support boolean input list : ` true | True | TRUE | false | False | FALSE ` .
* The return value is also in boolean type .
* ref : https : //yaml.org/spec/1.2/spec.html#id2804923
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns boolean
* /
function getBooleanInput ( name , options ) {
const trueValue = [ 'true' , 'True' , 'TRUE' ] ;
const falseValue = [ 'false' , 'False' , 'FALSE' ] ;
const val = getInput ( name , options ) ;
if ( trueValue . includes ( val ) )
return true ;
if ( falseValue . includes ( val ) )
return false ;
throw new TypeError ( ` Input does not meet YAML 1.2 "Core Schema" specification: ${ name } \n ` +
` Support boolean input list: \` true | True | TRUE | false | False | FALSE \` ` ) ;
}
exports . getBooleanInput = getBooleanInput ;
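// Illustrative example: a hypothetical input set to 'True' or 'TRUE' returns true,
// 'false' returns false, and any other value (e.g. 'yes') throws the TypeError above.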
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput ( name , value ) {
const filePath = process . env [ 'GITHUB_OUTPUT' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'OUTPUT' , file _command _1 . prepareKeyValueMessage ( name , value ) ) ;
}
process . stdout . write ( os . EOL ) ;
command _1 . issueCommand ( 'set-output' , { name } , utils _1 . toCommandValue ( value ) ) ;
}
exports . setOutput = setOutput ;
/ * *
* Enables or disables the echoing of commands into stdout for the rest of the step .
* Echoing is disabled by default if ACTIONS _STEP _DEBUG is not set .
*
* /
function setCommandEcho ( enabled ) {
command _1 . issue ( 'echo' , enabled ? 'on' : 'off' ) ;
}
exports . setCommandEcho = setCommandEcho ;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/ * *
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message add error issue message
* /
function setFailed ( message ) {
process . exitCode = ExitCode . Failure ;
error ( message ) ;
}
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Gets whether Actions Step Debug is on or not
* /
function isDebug ( ) {
return process . env [ 'RUNNER_DEBUG' ] === '1' ;
}
exports . isDebug = isDebug ;
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
* @ param message error issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
* /
function error ( message , properties = { } ) {
command _1 . issueCommand ( 'error' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . error = error ;
/ * *
* Adds a warning issue
* @ param message warning issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
* /
function warning ( message , properties = { } ) {
command _1 . issueCommand ( 'warning' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . warning = warning ;
/ * *
* Adds a notice issue
* @ param message notice issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
* /
function notice ( message , properties = { } ) {
command _1 . issueCommand ( 'notice' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . notice = notice ;
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/ * *
* Begin an output group .
*
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
* /
function startGroup ( name ) {
command _1 . issue ( 'group' , name ) ;
}
exports . startGroup = startGroup ;
/ * *
* End an output group .
* /
function endGroup ( ) {
command _1 . issue ( 'endgroup' ) ;
}
exports . endGroup = endGroup ;
/ * *
* Wrap an asynchronous function call in a group .
*
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
* /
function group ( name , fn ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
startGroup ( name ) ;
let result ;
try {
result = yield fn ( ) ;
}
finally {
endGroup ( ) ;
}
return result ;
} ) ;
}
exports . group = group ;
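// Illustrative usage, with a hypothetical group name and callback:
// await group('Install dependencies', async () => exec('npm', ['ci']))
// emits '::group::Install dependencies', runs the callback, and always emits
// '::endgroup::' afterwards, so the wrapped log lines fold in the Actions UI.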
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/ * *
* Saves state for current action , the state can only be retrieved by this action ' s post job execution .
*
* @ param name name of the state to store
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState ( name , value ) {
const filePath = process . env [ 'GITHUB_STATE' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'STATE' , file _command _1 . prepareKeyValueMessage ( name , value ) ) ;
}
command _1 . issueCommand ( 'save-state' , { name } , utils _1 . toCommandValue ( value ) ) ;
}
exports . saveState = saveState ;
/ * *
* Gets the value of a state set by this action ' s main execution .
*
* @ param name name of the state to get
* @ returns string
* /
function getState ( name ) {
return process . env [ ` STATE_ ${ name } ` ] || '' ;
}
exports . getState = getState ;
function getIDToken ( aud ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield oidc _utils _1 . OidcClient . getIDToken ( aud ) ;
} ) ;
}
exports . getIDToken = getIDToken ;
/ * *
* Summary exports
* /
var summary _1 = _ _nccwpck _require _ _ ( 1327 ) ;
Object . defineProperty ( exports , "summary" , ( { enumerable : true , get : function ( ) { return summary _1 . summary ; } } ) ) ;
/ * *
* @ deprecated use core . summary
* /
var summary _2 = _ _nccwpck _require _ _ ( 1327 ) ;
Object . defineProperty ( exports , "markdownSummary" , ( { enumerable : true , get : function ( ) { return summary _2 . markdownSummary ; } } ) ) ;
/ * *
* Path exports
* /
var path _utils _1 = _ _nccwpck _require _ _ ( 2981 ) ;
Object . defineProperty ( exports , "toPosixPath" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toPosixPath ; } } ) ) ;
Object . defineProperty ( exports , "toWin32Path" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toWin32Path ; } } ) ) ;
Object . defineProperty ( exports , "toPlatformPath" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toPlatformPath ; } } ) ) ;
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 717 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
// For internal use, subject to change.
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . prepareKeyValueMessage = exports . issueFileCommand = void 0 ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const uuid _1 = _ _nccwpck _require _ _ ( 5840 ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
function issueFileCommand ( command , message ) {
const filePath = process . env [ ` GITHUB_ ${ command } ` ] ;
if ( ! filePath ) {
throw new Error ( ` Unable to find environment variable for file command ${ command } ` ) ;
}
if ( ! fs . existsSync ( filePath ) ) {
throw new Error ( ` Missing file at path: ${ filePath } ` ) ;
}
fs . appendFileSync ( filePath , ` ${ utils _1 . toCommandValue ( message ) } ${ os . EOL } ` , {
encoding : 'utf8'
} ) ;
}
exports . issueFileCommand = issueFileCommand ;
function prepareKeyValueMessage ( key , value ) {
const delimiter = ` ghadelimiter_ ${ uuid _1 . v4 ( ) } ` ;
const convertedValue = utils _1 . toCommandValue ( value ) ;
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if ( key . includes ( delimiter ) ) {
throw new Error ( ` Unexpected input: name should not contain the delimiter " ${ delimiter } " ` ) ;
}
if ( convertedValue . includes ( delimiter ) ) {
throw new Error ( ` Unexpected input: value should not contain the delimiter " ${ delimiter } " ` ) ;
}
return ` ${ key } << ${ delimiter } ${ os . EOL } ${ convertedValue } ${ os . EOL } ${ delimiter } ` ;
}
exports . prepareKeyValueMessage = prepareKeyValueMessage ;
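// Illustrative example, with a hypothetical key and value: prepareKeyValueMessage('sha', 'abc123')
// produces a heredoc-style block such as
//   sha<<ghadelimiter_<random-uuid>
//   abc123
//   ghadelimiter_<random-uuid>
// which issueFileCommand then appends to the file named by GITHUB_OUTPUT, GITHUB_ENV, etc.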
//# sourceMappingURL=file-command.js.map
/***/ } ) ,
/***/ 8041 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . OidcClient = void 0 ;
const http _client _1 = _ _nccwpck _require _ _ ( 6255 ) ;
const auth _1 = _ _nccwpck _require _ _ ( 5526 ) ;
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
class OidcClient {
static createHttpClient ( allowRetry = true , maxRetry = 10 ) {
const requestOptions = {
allowRetries : allowRetry ,
maxRetries : maxRetry
} ;
return new http _client _1 . HttpClient ( 'actions/oidc-client' , [ new auth _1 . BearerCredentialHandler ( OidcClient . getRequestToken ( ) ) ] , requestOptions ) ;
}
static getRequestToken ( ) {
const token = process . env [ 'ACTIONS_ID_TOKEN_REQUEST_TOKEN' ] ;
if ( ! token ) {
throw new Error ( 'Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable' ) ;
}
return token ;
}
static getIDTokenUrl ( ) {
const runtimeUrl = process . env [ 'ACTIONS_ID_TOKEN_REQUEST_URL' ] ;
if ( ! runtimeUrl ) {
throw new Error ( 'Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable' ) ;
}
return runtimeUrl ;
}
static getCall ( id _token _url ) {
var _a ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpclient = OidcClient . createHttpClient ( ) ;
const res = yield httpclient
. getJson ( id _token _url )
. catch ( error => {
throw new Error ( ` Failed to get ID Token. \n
Error Code : ${ error . statusCode } \n
Error Message : ${ error . result . message } ` ) ;
} ) ;
const id _token = ( _a = res . result ) === null || _a === void 0 ? void 0 : _a . value ;
if ( ! id _token ) {
throw new Error ( 'Response json body does not have ID Token field' ) ;
}
return id _token ;
} ) ;
}
static getIDToken ( audience ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
// New ID Token is requested from action service
let id _token _url = OidcClient . getIDTokenUrl ( ) ;
if ( audience ) {
const encodedAudience = encodeURIComponent ( audience ) ;
id _token _url = ` ${ id _token _url } &audience= ${ encodedAudience } ` ;
}
core _1 . debug ( ` ID token url is ${ id _token _url } ` ) ;
const id _token = yield OidcClient . getCall ( id _token _url ) ;
core _1 . setSecret ( id _token ) ;
return id _token ;
}
catch ( error ) {
throw new Error ( ` Error message: ${ error . message } ` ) ;
}
} ) ;
}
}
exports . OidcClient = OidcClient ;
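// Illustrative flow, assuming the runner provides the OIDC env variables and using a
// hypothetical audience: getIDToken('sts.amazonaws.com') appends '&audience=sts.amazonaws.com'
// to ACTIONS_ID_TOKEN_REQUEST_URL, GETs it with the ACTIONS_ID_TOKEN_REQUEST_TOKEN bearer
// token, masks the returned token via core.setSecret and resolves with it.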
//# sourceMappingURL=oidc-utils.js.map
/***/ } ) ,
/***/ 2981 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . toPlatformPath = exports . toWin32Path = exports . toPosixPath = void 0 ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
/ * *
* toPosixPath converts the given path to the posix form . On Windows , \ \ will be
* replaced with / .
*
* @ param pth . Path to transform .
* @ return string Posix path .
* /
function toPosixPath ( pth ) {
return pth . replace ( /[\\]/g , '/' ) ;
}
exports . toPosixPath = toPosixPath ;
/ * *
* toWin32Path converts the given path to the win32 form . On Linux , / will be
* replaced with \ \ .
*
* @ param pth . Path to transform .
* @ return string Win32 path .
* /
function toWin32Path ( pth ) {
return pth . replace ( /[/]/g , '\\' ) ;
}
exports . toWin32Path = toWin32Path ;
/ * *
* toPlatformPath converts the given path to a platform - specific path . It does
* this by replacing instances of / and \ with the platform - specific path
* separator .
*
* @ param pth The path to platformize .
* @ return string The platform - specific path .
* /
function toPlatformPath ( pth ) {
return pth . replace ( /[/\\]/g , path . sep ) ;
}
exports . toPlatformPath = toPlatformPath ;
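// Illustrative examples, with hypothetical paths: toPosixPath('dir\\sub\\file.txt') returns
// 'dir/sub/file.txt', toWin32Path('dir/sub/file.txt') returns 'dir\\sub\\file.txt', and
// toPlatformPath replaces both separators with path.sep for the current OS.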
//# sourceMappingURL=path-utils.js.map
/***/ } ) ,
/***/ 1327 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . summary = exports . markdownSummary = exports . SUMMARY _DOCS _URL = exports . SUMMARY _ENV _VAR = void 0 ;
const os _1 = _ _nccwpck _require _ _ ( 2037 ) ;
const fs _1 = _ _nccwpck _require _ _ ( 7147 ) ;
const { access , appendFile , writeFile } = fs _1 . promises ;
exports . SUMMARY _ENV _VAR = 'GITHUB_STEP_SUMMARY' ;
exports . SUMMARY _DOCS _URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary' ;
class Summary {
constructor ( ) {
this . _buffer = '' ;
}
/ * *
* Finds the summary file path from the environment , rejects if env var is not found or file does not exist
* Also checks r / w permissions .
*
* @ returns step summary file path
* /
filePath ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( this . _filePath ) {
return this . _filePath ;
}
const pathFromEnv = process . env [ exports . SUMMARY _ENV _VAR ] ;
if ( ! pathFromEnv ) {
throw new Error ( ` Unable to find environment variable for $ ${ exports . SUMMARY _ENV _VAR } . Check if your runtime environment supports job summaries. ` ) ;
}
try {
yield access ( pathFromEnv , fs _1 . constants . R _OK | fs _1 . constants . W _OK ) ;
}
catch ( _a ) {
throw new Error ( ` Unable to access summary file: ' ${ pathFromEnv } '. Check if the file has correct read/write permissions. ` ) ;
}
this . _filePath = pathFromEnv ;
return this . _filePath ;
} ) ;
}
/ * *
* Wraps content in an HTML tag , adding any HTML attributes
*
* @ param { string } tag HTML tag to wrap
* @ param { string | null } content content within the tag
* @ param { [ attribute : string ] : string } attrs key - value list of HTML attributes to add
*
* @ returns { string } content wrapped in HTML element
* /
wrap ( tag , content , attrs = { } ) {
const htmlAttrs = Object . entries ( attrs )
. map ( ( [ key , value ] ) => ` ${ key } =" ${ value } " ` )
. join ( '' ) ;
if ( ! content ) {
return ` < ${ tag } ${ htmlAttrs } > ` ;
}
return ` < ${ tag } ${ htmlAttrs } > ${ content } </ ${ tag } > ` ;
}
/ * *
* Writes text in the buffer to the summary buffer file and empties buffer . Will append by default .
*
* @ param { SummaryWriteOptions } [ options ] ( optional ) options for write operation
*
* @ returns { Promise < Summary > } summary instance
* /
write ( options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const overwrite = ! ! ( options === null || options === void 0 ? void 0 : options . overwrite ) ;
const filePath = yield this . filePath ( ) ;
const writeFunc = overwrite ? writeFile : appendFile ;
yield writeFunc ( filePath , this . _buffer , { encoding : 'utf8' } ) ;
return this . emptyBuffer ( ) ;
} ) ;
}
/ * *
* Clears the summary buffer and wipes the summary file
*
* @ returns { Summary } summary instance
* /
clear ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . emptyBuffer ( ) . write ( { overwrite : true } ) ;
} ) ;
}
/ * *
* Returns the current summary buffer as a string
*
* @ returns { string } string of summary buffer
* /
stringify ( ) {
return this . _buffer ;
}
/ * *
* If the summary buffer is empty
*
* @ returns { boolean } true if the buffer is empty
* /
isEmptyBuffer ( ) {
return this . _buffer . length === 0 ;
}
/ * *
* Resets the summary buffer without writing to summary file
*
* @ returns { Summary } summary instance
* /
emptyBuffer ( ) {
this . _buffer = '' ;
return this ;
}
/ * *
* Adds raw text to the summary buffer
*
* @ param { string } text content to add
* @ param { boolean } [ addEOL = false ] ( optional ) append an EOL to the raw text ( default : false )
*
* @ returns { Summary } summary instance
* /
addRaw ( text , addEOL = false ) {
this . _buffer += text ;
return addEOL ? this . addEOL ( ) : this ;
}
/ * *
* Adds the operating system - specific end - of - line marker to the buffer
*
* @ returns { Summary } summary instance
* /
addEOL ( ) {
return this . addRaw ( os _1 . EOL ) ;
}
/ * *
* Adds an HTML codeblock to the summary buffer
*
* @ param { string } code content to render within fenced code block
* @ param { string } lang ( optional ) language to syntax highlight code
*
* @ returns { Summary } summary instance
* /
addCodeBlock ( code , lang ) {
const attrs = Object . assign ( { } , ( lang && { lang } ) ) ;
const element = this . wrap ( 'pre' , this . wrap ( 'code' , code ) , attrs ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML list to the summary buffer
*
* @ param { string [ ] } items list of items to render
* @ param { boolean } [ ordered = false ] ( optional ) if the rendered list should be ordered or not ( default : false )
*
* @ returns { Summary } summary instance
* /
addList ( items , ordered = false ) {
const tag = ordered ? 'ol' : 'ul' ;
const listItems = items . map ( item => this . wrap ( 'li' , item ) ) . join ( '' ) ;
const element = this . wrap ( tag , listItems ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML table to the summary buffer
*
* @ param { SummaryTableCell [ ] } rows table rows
*
* @ returns { Summary } summary instance
* /
addTable ( rows ) {
const tableBody = rows
. map ( row => {
const cells = row
. map ( cell => {
if ( typeof cell === 'string' ) {
return this . wrap ( 'td' , cell ) ;
}
const { header , data , colspan , rowspan } = cell ;
const tag = header ? 'th' : 'td' ;
const attrs = Object . assign ( Object . assign ( { } , ( colspan && { colspan } ) ) , ( rowspan && { rowspan } ) ) ;
return this . wrap ( tag , data , attrs ) ;
} )
. join ( '' ) ;
return this . wrap ( 'tr' , cells ) ;
} )
. join ( '' ) ;
const element = this . wrap ( 'table' , tableBody ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds a collapsible HTML details element to the summary buffer
*
* @ param { string } label text for the closed state
* @ param { string } content collapsible content
*
* @ returns { Summary } summary instance
* /
addDetails ( label , content ) {
const element = this . wrap ( 'details' , this . wrap ( 'summary' , label ) + content ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML image tag to the summary buffer
*
* @ param { string } src path to the image to embed
* @ param { string } alt text description of the image
* @ param { SummaryImageOptions } options ( optional ) additional image attributes
*
* @ returns { Summary } summary instance
* /
addImage ( src , alt , options ) {
const { width , height } = options || { } ;
const attrs = Object . assign ( Object . assign ( { } , ( width && { width } ) ) , ( height && { height } ) ) ;
const element = this . wrap ( 'img' , null , Object . assign ( { src , alt } , attrs ) ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML section heading element
*
* @ param { string } text heading text
* @ param { number | string } [ level = 1 ] ( optional ) the heading level , default : 1
*
* @ returns { Summary } summary instance
* /
addHeading ( text , level ) {
const tag = ` h ${ level } ` ;
const allowedTag = [ 'h1' , 'h2' , 'h3' , 'h4' , 'h5' , 'h6' ] . includes ( tag )
? tag
: 'h1' ;
const element = this . wrap ( allowedTag , text ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML thematic break ( < hr > ) to the summary buffer
*
* @ returns { Summary } summary instance
* /
addSeparator ( ) {
const element = this . wrap ( 'hr' , null ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML line break ( < br > ) to the summary buffer
*
* @ returns { Summary } summary instance
* /
addBreak ( ) {
const element = this . wrap ( 'br' , null ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML blockquote to the summary buffer
*
* @ param { string } text quote text
* @ param { string } cite ( optional ) citation url
*
* @ returns { Summary } summary instance
* /
addQuote ( text , cite ) {
const attrs = Object . assign ( { } , ( cite && { cite } ) ) ;
const element = this . wrap ( 'blockquote' , text , attrs ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
/ * *
* Adds an HTML anchor tag to the summary buffer
*
* @ param { string } text link text / content
* @ param { string } href hyperlink
*
* @ returns { Summary } summary instance
* /
addLink ( text , href ) {
const element = this . wrap ( 'a' , text , { href } ) ;
return this . addRaw ( element ) . addEOL ( ) ;
}
}
const _summary = new Summary ( ) ;
/ * *
* @ deprecated use ` core.summary `
* /
exports . markdownSummary = _summary ;
exports . summary = _summary ;
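// Illustrative usage, assuming GITHUB_STEP_SUMMARY is set by the runner and using
// hypothetical content: core.summary.addHeading('Changed files').addList(['a.txt', 'b.txt']).write()
// buffers the HTML and appends it to the step-summary file; every add* method returns the
// Summary instance, so calls chain.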
//# sourceMappingURL=summary.js.map
/***/ } ) ,
/***/ 5278 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . toCommandProperties = exports . toCommandValue = void 0 ;
/ * *
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
* /
function toCommandValue ( input ) {
if ( input === null || input === undefined ) {
return '' ;
}
else if ( typeof input === 'string' || input instanceof String ) {
return input ;
}
return JSON . stringify ( input ) ;
}
exports . toCommandValue = toCommandValue ;
/ * *
*
* @ param annotationProperties
* @ returns The command properties to send with the actual annotation command
* See IssueCommandProperties : https : //github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
* /
function toCommandProperties ( annotationProperties ) {
if ( ! Object . keys ( annotationProperties ) . length ) {
return { } ;
}
return {
title : annotationProperties . title ,
file : annotationProperties . file ,
line : annotationProperties . startLine ,
endLine : annotationProperties . endLine ,
col : annotationProperties . startColumn ,
endColumn : annotationProperties . endColumn
} ;
}
exports . toCommandProperties = toCommandProperties ;
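// Illustrative example, with hypothetical annotation properties:
// toCommandProperties({ title: 'Lint', file: 'a.ts', startLine: 3, endLine: 5 })
// returns { title: 'Lint', file: 'a.ts', line: 3, endLine: 5, col: undefined, endColumn: undefined },
// i.e. it renames startLine/startColumn to the line/col keys the runner expects.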
//# sourceMappingURL=utils.js.map
/***/ } ) ,
/***/ 1514 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getExecOutput = exports . exec = void 0 ;
const string _decoder _1 = _ _nccwpck _require _ _ ( 1576 ) ;
const tr = _ _importStar ( _ _nccwpck _require _ _ ( 8159 ) ) ;
/ * *
* Exec a command .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < number > exit code
* /
function exec ( commandLine , args , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const commandArgs = tr . argStringToArray ( commandLine ) ;
if ( commandArgs . length === 0 ) {
throw new Error ( ` Parameter 'commandLine' cannot be null or empty. ` ) ;
}
// Path to tool to execute should be first arg
const toolPath = commandArgs [ 0 ] ;
args = commandArgs . slice ( 1 ) . concat ( args || [ ] ) ;
const runner = new tr . ToolRunner ( toolPath , args , options ) ;
return runner . exec ( ) ;
} ) ;
}
exports . exec = exec ;
/ * *
* Exec a command and get the output .
* Output will be streamed to the live console .
* Returns promise with the exit code and collected stdout and stderr
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < ExecOutput > exit code , stdout , and stderr
* /
function getExecOutput ( commandLine , args , options ) {
var _a , _b ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stdout = '' ;
let stderr = '' ;
// Using string decoder covers the case where a multi-byte character is split
const stdoutDecoder = new string _decoder _1 . StringDecoder ( 'utf8' ) ;
const stderrDecoder = new string _decoder _1 . StringDecoder ( 'utf8' ) ;
const originalStdoutListener = ( _a = options === null || options === void 0 ? void 0 : options . listeners ) === null || _a === void 0 ? void 0 : _a . stdout ;
const originalStdErrListener = ( _b = options === null || options === void 0 ? void 0 : options . listeners ) === null || _b === void 0 ? void 0 : _b . stderr ;
const stdErrListener = ( data ) => {
stderr += stderrDecoder . write ( data ) ;
if ( originalStdErrListener ) {
originalStdErrListener ( data ) ;
}
} ;
const stdOutListener = ( data ) => {
stdout += stdoutDecoder . write ( data ) ;
if ( originalStdoutListener ) {
originalStdoutListener ( data ) ;
}
} ;
const listeners = Object . assign ( Object . assign ( { } , options === null || options === void 0 ? void 0 : options . listeners ) , { stdout : stdOutListener , stderr : stdErrListener } ) ;
const exitCode = yield exec ( commandLine , args , Object . assign ( Object . assign ( { } , options ) , { listeners } ) ) ;
//flush any remaining characters
stdout += stdoutDecoder . end ( ) ;
stderr += stderrDecoder . end ( ) ;
return {
exitCode ,
stdout ,
stderr
} ;
} ) ;
}
exports . getExecOutput = getExecOutput ;
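// Illustrative usage, mirroring how the git helpers earlier in this bundle call it:
// const { exitCode, stdout, stderr } = await getExecOutput('git', ['rev-parse', 'HEAD'], { silent: true });
// suppresses console streaming (silent), collects stdout/stderr via StringDecoder, and
// resolves with the exit code once the process finishes.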
//# sourceMappingURL=exec.js.map
/***/ } ) ,
/***/ 8159 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . argStringToArray = exports . ToolRunner = void 0 ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const events = _ _importStar ( _ _nccwpck _require _ _ ( 2361 ) ) ;
const child = _ _importStar ( _ _nccwpck _require _ _ ( 2081 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const ioUtil = _ _importStar ( _ _nccwpck _require _ _ ( 1962 ) ) ;
const timers _1 = _ _nccwpck _require _ _ ( 9512 ) ;
/* eslint-disable @typescript-eslint/unbound-method */
const IS _WINDOWS = process . platform === 'win32' ;
/ *
* Class for running command line tools . Handles quoting and arg parsing in a platform agnostic way .
* /
class ToolRunner extends events . EventEmitter {
constructor ( toolPath , args , options ) {
super ( ) ;
if ( ! toolPath ) {
throw new Error ( "Parameter 'toolPath' cannot be null or empty." ) ;
}
this . toolPath = toolPath ;
this . args = args || [ ] ;
this . options = options || { } ;
}
_debug ( message ) {
if ( this . options . listeners && this . options . listeners . debug ) {
this . options . listeners . debug ( message ) ;
}
}
_getCommandString ( options , noPrefix ) {
const toolPath = this . _getSpawnFileName ( ) ;
const args = this . _getSpawnArgs ( options ) ;
let cmd = noPrefix ? '' : '[command]' ; // omit prefix when piped to a second tool
if ( IS _WINDOWS ) {
// Windows + cmd file
if ( this . _isCmdFile ( ) ) {
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
// Windows + verbatim
else if ( options . windowsVerbatimArguments ) {
cmd += ` " ${ toolPath } " ` ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
// Windows (regular)
else {
cmd += this . _windowsQuoteCmdArg ( toolPath ) ;
for ( const a of args ) {
cmd += ` ${ this . _windowsQuoteCmdArg ( a ) } ` ;
}
}
}
else {
// OSX/Linux - this can likely be improved with some form of quoting.
// creating processes on Unix is fundamentally different than Windows.
// on Unix, execvp() takes an arg array.
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
return cmd ;
}
_processLineBuffer ( data , strBuffer , onLine ) {
try {
let s = strBuffer + data . toString ( ) ;
let n = s . indexOf ( os . EOL ) ;
while ( n > - 1 ) {
const line = s . substring ( 0 , n ) ;
onLine ( line ) ;
// the rest of the string ...
s = s . substring ( n + os . EOL . length ) ;
n = s . indexOf ( os . EOL ) ;
}
return s ;
}
catch ( err ) {
// streaming lines to console is best effort. Don't fail a build.
this . _debug ( ` error processing line. Failed with error ${ err } ` ) ;
return '' ;
}
}
_getSpawnFileName ( ) {
if ( IS _WINDOWS ) {
if ( this . _isCmdFile ( ) ) {
return process . env [ 'COMSPEC' ] || 'cmd.exe' ;
}
}
return this . toolPath ;
}
_getSpawnArgs ( options ) {
if ( IS _WINDOWS ) {
if ( this . _isCmdFile ( ) ) {
let argline = ` /D /S /C " ${ this . _windowsQuoteCmdArg ( this . toolPath ) } ` ;
for ( const a of this . args ) {
argline += ' ' ;
argline += options . windowsVerbatimArguments
? a
: this . _windowsQuoteCmdArg ( a ) ;
}
argline += '"' ;
return [ argline ] ;
}
}
return this . args ;
}
_endsWith ( str , end ) {
return str . endsWith ( end ) ;
}
_isCmdFile ( ) {
const upperToolPath = this . toolPath . toUpperCase ( ) ;
return ( this . _endsWith ( upperToolPath , '.CMD' ) ||
this . _endsWith ( upperToolPath , '.BAT' ) ) ;
}
_windowsQuoteCmdArg ( arg ) {
// for .exe, apply the normal quoting rules that libuv applies
if ( ! this . _isCmdFile ( ) ) {
return this . _uvQuoteCmdArg ( arg ) ;
}
// otherwise apply quoting rules specific to the cmd.exe command line parser.
// the libuv rules are generic and are not designed specifically for cmd.exe
// command line parser.
//
// for a detailed description of the cmd.exe command line parser, refer to
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
// need quotes for empty arg
if ( ! arg ) {
return '""' ;
}
// determine whether the arg needs to be quoted
const cmdSpecialChars = [
' ' ,
'\t' ,
'&' ,
'(' ,
')' ,
'[' ,
']' ,
'{' ,
'}' ,
'^' ,
'=' ,
';' ,
'!' ,
"'" ,
'+' ,
',' ,
'`' ,
'~' ,
'|' ,
'<' ,
'>' ,
'"'
] ;
let needsQuotes = false ;
for ( const char of arg ) {
if ( cmdSpecialChars . some ( x => x === char ) ) {
needsQuotes = true ;
break ;
}
}
// short-circuit if quotes not needed
if ( ! needsQuotes ) {
return arg ;
}
// the following quoting rules are very similar to the rules that libuv applies.
//
// 1) wrap the string in quotes
//
// 2) double-up quotes - i.e. " => ""
//
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
// doesn't work well with a cmd.exe command line.
//
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
// for example, the command line:
// foo.exe "myarg:""my val"""
// is parsed by a .NET console app into an arg array:
// [ "myarg:\"my val\"" ]
// which is the same end result when applying libuv quoting rules. although the actual
// command line from libuv quoting rules would look like:
// foo.exe "myarg:\"my val\""
//
// 3) double-up slashes that precede a quote,
// e.g. hello \world => "hello \world"
// hello\"world => "hello\\""world"
// hello\\"world => "hello\\\\""world"
// hello world\ => "hello world\\"
//
// technically this is not required for a cmd.exe command line, or the batch argument parser.
// the reasons for including this as a .cmd quoting rule are:
//
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
//
// b) it's what we've been doing previously (by deferring to node default behavior) and we
// haven't heard any complaints about that aspect.
//
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
// by using %%.
//
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
//
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
// to an external program.
//
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
// % can be escaped within a .cmd file.
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ; // double the slash
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '"' ; // double the quote
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_uvQuoteCmdArg ( arg ) {
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
// is used.
//
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
// pasting copyright notice from Node within this function:
//
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
if ( ! arg ) {
// Need double quotation for empty argument
return '""' ;
}
if ( ! arg . includes ( ' ' ) && ! arg . includes ( '\t' ) && ! arg . includes ( '"' ) ) {
// No quotation needed
return arg ;
}
if ( ! arg . includes ( '"' ) && ! arg . includes ( '\\' ) ) {
// No embedded double quotes or backslashes, so I can just wrap
// quote marks around the whole thing.
return ` " ${ arg } " ` ;
}
// Expected input/output:
// input : hello"world
// output: "hello\"world"
// input : hello""world
// output: "hello\"\"world"
// input : hello\world
// output: hello\world
// input : hello\\world
// output: hello\\world
// input : hello\"world
// output: "hello\\\"world"
// input : hello\\"world
// output: "hello\\\\\"world"
// input : hello world\
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
// but it appears the comment is wrong, it should be "hello world\\"
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ;
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '\\' ;
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_cloneExecOptions ( options ) {
options = options || { } ;
const result = {
cwd : options . cwd || process . cwd ( ) ,
env : options . env || process . env ,
silent : options . silent || false ,
windowsVerbatimArguments : options . windowsVerbatimArguments || false ,
failOnStdErr : options . failOnStdErr || false ,
ignoreReturnCode : options . ignoreReturnCode || false ,
delay : options . delay || 10000
} ;
result . outStream = options . outStream || process . stdout ;
result . errStream = options . errStream || process . stderr ;
return result ;
}
_getSpawnOptions ( options , toolPath ) {
options = options || { } ;
const result = { } ;
result . cwd = options . cwd ;
result . env = options . env ;
result [ 'windowsVerbatimArguments' ] =
options . windowsVerbatimArguments || this . _isCmdFile ( ) ;
if ( options . windowsVerbatimArguments ) {
result . argv0 = ` " ${ toolPath } " ` ;
}
return result ;
}
/ * *
* Exec a tool .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param tool path to tool to exec
* @ param options optional exec options . See ExecOptions
* @ returns Promise < number > the exit code
* /
exec ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// root the tool path if it is unrooted and contains relative pathing
if ( ! ioUtil . isRooted ( this . toolPath ) &&
( this . toolPath . includes ( '/' ) ||
( IS _WINDOWS && this . toolPath . includes ( '\\' ) ) ) ) {
// prefer options.cwd if it is specified, however options.cwd may also need to be rooted
this . toolPath = path . resolve ( process . cwd ( ) , this . options . cwd || process . cwd ( ) , this . toolPath ) ;
}
// if the tool is only a file name, then resolve it from the PATH
// otherwise verify it exists (add extension on Windows if necessary)
this . toolPath = yield io . which ( this . toolPath , true ) ;
return new Promise ( ( resolve , reject ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
this . _debug ( ` exec tool: ${ this . toolPath } ` ) ;
this . _debug ( 'arguments:' ) ;
for ( const arg of this . args ) {
this . _debug ( ` ${ arg } ` ) ;
}
const optionsNonNull = this . _cloneExecOptions ( this . options ) ;
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( this . _getCommandString ( optionsNonNull ) + os . EOL ) ;
}
const state = new ExecState ( optionsNonNull , this . toolPath ) ;
state . on ( 'debug' , ( message ) => {
this . _debug ( message ) ;
} ) ;
if ( this . options . cwd && ! ( yield ioUtil . exists ( this . options . cwd ) ) ) {
return reject ( new Error ( ` The cwd: ${ this . options . cwd } does not exist! ` ) ) ;
}
const fileName = this . _getSpawnFileName ( ) ;
const cp = child . spawn ( fileName , this . _getSpawnArgs ( optionsNonNull ) , this . _getSpawnOptions ( this . options , fileName ) ) ;
let stdbuffer = '' ;
if ( cp . stdout ) {
cp . stdout . on ( 'data' , ( data ) => {
if ( this . options . listeners && this . options . listeners . stdout ) {
this . options . listeners . stdout ( data ) ;
}
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( data ) ;
}
stdbuffer = this . _processLineBuffer ( data , stdbuffer , ( line ) => {
if ( this . options . listeners && this . options . listeners . stdline ) {
this . options . listeners . stdline ( line ) ;
}
} ) ;
} ) ;
}
let errbuffer = '' ;
if ( cp . stderr ) {
cp . stderr . on ( 'data' , ( data ) => {
state . processStderr = true ;
if ( this . options . listeners && this . options . listeners . stderr ) {
this . options . listeners . stderr ( data ) ;
}
if ( ! optionsNonNull . silent &&
optionsNonNull . errStream &&
optionsNonNull . outStream ) {
const s = optionsNonNull . failOnStdErr
? optionsNonNull . errStream
: optionsNonNull . outStream ;
s . write ( data ) ;
}
errbuffer = this . _processLineBuffer ( data , errbuffer , ( line ) => {
if ( this . options . listeners && this . options . listeners . errline ) {
this . options . listeners . errline ( line ) ;
}
} ) ;
} ) ;
}
cp . on ( 'error' , ( err ) => {
state . processError = err . message ;
state . processExited = true ;
state . processClosed = true ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'exit' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
this . _debug ( ` Exit code ${ code } received from tool ' ${ this . toolPath } ' ` ) ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'close' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
state . processClosed = true ;
this . _debug ( ` STDIO streams have closed for tool ' ${ this . toolPath } ' ` ) ;
state . CheckComplete ( ) ;
} ) ;
state . on ( 'done' , ( error , exitCode ) => {
if ( stdbuffer . length > 0 ) {
this . emit ( 'stdline' , stdbuffer ) ;
}
if ( errbuffer . length > 0 ) {
this . emit ( 'errline' , errbuffer ) ;
}
cp . removeAllListeners ( ) ;
if ( error ) {
reject ( error ) ;
}
else {
resolve ( exitCode ) ;
}
} ) ;
if ( this . options . input ) {
if ( ! cp . stdin ) {
throw new Error ( 'child process missing stdin' ) ;
}
cp . stdin . end ( this . options . input ) ;
}
} ) ) ;
} ) ;
}
}
exports . ToolRunner = ToolRunner ;
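// Illustrative usage (added; assumes the (toolPath, args, options) constructor defined
// earlier in this module, which is how the higher-level exec() helper drives this class):
//   const runner = new ToolRunner('git', ['status'], { silent: true });
//   const exitCode = await runner.exec(); // resolves with the tool's exit code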
/ * *
* Convert an arg string to an array of args . Handles escaping
*
* @ param argString string of arguments
* @ returns string [ ] array of arguments
* /
function argStringToArray ( argString ) {
const args = [ ] ;
let inQuotes = false ;
let escaped = false ;
let arg = '' ;
function append ( c ) {
// we only escape double quotes.
if ( escaped && c !== '"' ) {
arg += '\\' ;
}
arg += c ;
escaped = false ;
}
for ( let i = 0 ; i < argString . length ; i ++ ) {
const c = argString . charAt ( i ) ;
if ( c === '"' ) {
if ( ! escaped ) {
inQuotes = ! inQuotes ;
}
else {
append ( c ) ;
}
continue ;
}
if ( c === '\\' && escaped ) {
append ( c ) ;
continue ;
}
if ( c === '\\' && inQuotes ) {
escaped = true ;
continue ;
}
if ( c === ' ' && ! inQuotes ) {
if ( arg . length > 0 ) {
args . push ( arg ) ;
arg = '' ;
}
continue ;
}
append ( c ) ;
}
if ( arg . length > 0 ) {
args . push ( arg . trim ( ) ) ;
}
return args ;
}
exports . argStringToArray = argStringToArray ;
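// Illustrative examples (added): outputs follow from the rules above, where a backslash
// only escapes a double quote, and only while inside quotes.
//   argStringToArray('foo "bar baz" qux')     // => ['foo', 'bar baz', 'qux']
//   argStringToArray('--msg "say \\"hi\\""')  // => ['--msg', 'say "hi"']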
class ExecState extends events . EventEmitter {
constructor ( options , toolPath ) {
super ( ) ;
this . processClosed = false ; // tracks whether the process has exited and stdio is closed
this . processError = '' ;
this . processExitCode = 0 ;
this . processExited = false ; // tracks whether the process has exited
this . processStderr = false ; // tracks whether stderr was written to
this . delay = 10000 ; // 10 seconds
this . done = false ;
this . timeout = null ;
if ( ! toolPath ) {
throw new Error ( 'toolPath must not be empty' ) ;
}
this . options = options ;
this . toolPath = toolPath ;
if ( options . delay ) {
this . delay = options . delay ;
}
}
CheckComplete ( ) {
if ( this . done ) {
return ;
}
if ( this . processClosed ) {
this . _setResult ( ) ;
}
else if ( this . processExited ) {
this . timeout = timers _1 . setTimeout ( ExecState . HandleTimeout , this . delay , this ) ;
}
}
_debug ( message ) {
this . emit ( 'debug' , message ) ;
}
_setResult ( ) {
// determine whether there is an error
let error ;
if ( this . processExited ) {
if ( this . processError ) {
error = new Error ( ` There was an error when attempting to execute the process ' ${ this . toolPath } '. This may indicate the process failed to start. Error: ${ this . processError } ` ) ;
}
else if ( this . processExitCode !== 0 && ! this . options . ignoreReturnCode ) {
error = new Error ( ` The process ' ${ this . toolPath } ' failed with exit code ${ this . processExitCode } ` ) ;
}
else if ( this . processStderr && this . options . failOnStdErr ) {
error = new Error ( ` The process ' ${ this . toolPath } ' failed because one or more lines were written to the STDERR stream ` ) ;
}
}
// clear the timeout
if ( this . timeout ) {
clearTimeout ( this . timeout ) ;
this . timeout = null ;
}
this . done = true ;
this . emit ( 'done' , error , this . processExitCode ) ;
}
static HandleTimeout ( state ) {
if ( state . done ) {
return ;
}
if ( ! state . processClosed && state . processExited ) {
const message = ` The STDIO streams did not close within ${ state . delay / 1000 } seconds of the exit event from process ' ${ state . toolPath } '. This may indicate a child process inherited the STDIO streams and has not yet exited . ` ;
state . _debug ( message ) ;
}
state . _setResult ( ) ;
}
}
//# sourceMappingURL=toolrunner.js.map
/***/ } ) ,
/***/ 1063 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . MatchKind = void 0 ;
/ * *
* Indicates whether a pattern matches a path
* /
var MatchKind ;
( function ( MatchKind ) {
/** Not matched */
MatchKind [ MatchKind [ "None" ] = 0 ] = "None" ;
/** Matched if the path is a directory */
MatchKind [ MatchKind [ "Directory" ] = 1 ] = "Directory" ;
/** Matched if the path is a regular file */
MatchKind [ MatchKind [ "File" ] = 2 ] = "File" ;
/** Matched */
MatchKind [ MatchKind [ "All" ] = 3 ] = "All" ;
} ) ( MatchKind = exports . MatchKind || ( exports . MatchKind = { } ) ) ;
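// Note (added for clarity): the values form a bit mask, so Directory | File === All, and
// the pattern helper later in this bundle combines results per pattern with bitwise OR
// (include patterns) and AND-NOT (negate patterns).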
//# sourceMappingURL=internal-match-kind.js.map
/***/ } ) ,
/***/ 1849 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . safeTrimTrailingSeparator = exports . normalizeSeparators = exports . hasRoot = exports . hasAbsoluteRoot = exports . ensureAbsoluteRoot = exports . dirname = void 0 ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const assert _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Similar to path . dirname except it normalizes the path separators and has slightly better handling for Windows UNC paths .
*
* For example , on Linux / macOS :
* - ` / => / `
* - ` /hello => / `
*
* For example , on Windows :
* - ` C:\ => C:\ `
* - ` C:\hello => C:\ `
* - ` C: => C: `
* - ` C:hello => C: `
* - ` \ => \ `
* - ` \hello => \ `
* - ` \\hello => \\hello `
* - ` \\hello\world => \\hello\world `
* /
function dirname ( p ) {
// Normalize slashes and trim unnecessary trailing slash
p = safeTrimTrailingSeparator ( p ) ;
// Windows UNC root, e.g. \\hello or \\hello\world
if ( IS _WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/ . test ( p ) ) {
return p ;
}
// Get dirname
let result = path . dirname ( p ) ;
// Trim trailing slash for Windows UNC root, e.g. \\hello\world\
if ( IS _WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/ . test ( result ) ) {
result = safeTrimTrailingSeparator ( result ) ;
}
return result ;
}
exports . dirname = dirname ;
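// Illustrative examples (added; shown at the character level like the doc comment above):
//   dirname('/hello/world/')   // => '/hello'   (trailing slash is trimmed first)
//   dirname('\\hello\share')   // Windows UNC root => returned unchanged as '\\hello\share'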
/ * *
* Roots the path if not already rooted . On Windows , relative roots like ` \ `
* or ` C: ` are expanded based on the current working directory .
* /
function ensureAbsoluteRoot ( root , itemPath ) {
assert _1 . default ( root , ` ensureAbsoluteRoot parameter 'root' must not be empty ` ) ;
assert _1 . default ( itemPath , ` ensureAbsoluteRoot parameter 'itemPath' must not be empty ` ) ;
// Already rooted
if ( hasAbsoluteRoot ( itemPath ) ) {
return itemPath ;
}
// Windows
if ( IS _WINDOWS ) {
// Check for itemPath like C: or C:foo
if ( itemPath . match ( /^[A-Z]:[^\\/]|^[A-Z]:$/i ) ) {
let cwd = process . cwd ( ) ;
assert _1 . default ( cwd . match ( /^[A-Z]:\\/i ) , ` Expected current directory to start with an absolute drive root. Actual ' ${ cwd } ' ` ) ;
// Drive letter matches cwd? Expand to cwd
if ( itemPath [ 0 ] . toUpperCase ( ) === cwd [ 0 ] . toUpperCase ( ) ) {
// Drive only, e.g. C:
if ( itemPath . length === 2 ) {
// Preserve specified drive letter case (upper or lower)
return ` ${ itemPath [ 0 ] } : \\ ${ cwd . substr ( 3 ) } ` ;
}
// Drive + path, e.g. C:foo
else {
if ( ! cwd . endsWith ( '\\' ) ) {
cwd += '\\' ;
}
// Preserve specified drive letter case (upper or lower)
return ` ${ itemPath [ 0 ] } : \\ ${ cwd . substr ( 3 ) } ${ itemPath . substr ( 2 ) } ` ;
}
}
// Different drive
else {
return ` ${ itemPath [ 0 ] } : \\ ${ itemPath . substr ( 2 ) } ` ;
}
}
// Check for itemPath like \ or \foo
else if ( normalizeSeparators ( itemPath ) . match ( /^\\$|^\\[^\\]/ ) ) {
const cwd = process . cwd ( ) ;
assert _1 . default ( cwd . match ( /^[A-Z]:\\/i ) , ` Expected current directory to start with an absolute drive root. Actual ' ${ cwd } ' ` ) ;
return ` ${ cwd [ 0 ] } : \\ ${ itemPath . substr ( 1 ) } ` ;
}
}
assert _1 . default ( hasAbsoluteRoot ( root ) , ` ensureAbsoluteRoot parameter 'root' must have an absolute root ` ) ;
// Otherwise ensure root ends with a separator
if ( root . endsWith ( '/' ) || ( IS _WINDOWS && root . endsWith ( '\\' ) ) ) {
// Intentionally empty
}
else {
// Append separator
root += path . sep ;
}
return root + itemPath ;
}
exports . ensureAbsoluteRoot = ensureAbsoluteRoot ;
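// Illustrative examples (added; Windows-only expansions, shown at the character level):
//   ensureAbsoluteRoot('C:\work', 'foo\bar')  // => 'C:\work\foo\bar'
//   ensureAbsoluteRoot('C:\work', 'D:stuff')  // => 'D:\stuff' (assuming cwd is not on D:)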
/ * *
* On Linux / macOS , true if path starts with ` / ` . On Windows , true for paths like :
* ` \\hello\share ` and ` C:\hello ` ( and using alternate separator ) .
* /
function hasAbsoluteRoot ( itemPath ) {
assert _1 . default ( itemPath , ` hasAbsoluteRoot parameter 'itemPath' must not be empty ` ) ;
// Normalize separators
itemPath = normalizeSeparators ( itemPath ) ;
// Windows
if ( IS _WINDOWS ) {
// E.g. \\hello\share or C:\hello
return itemPath . startsWith ( '\\\\' ) || /^[A-Z]:\\/i . test ( itemPath ) ;
}
// E.g. /hello
return itemPath . startsWith ( '/' ) ;
}
exports . hasAbsoluteRoot = hasAbsoluteRoot ;
/ * *
* On Linux / macOS , true if path starts with ` / ` . On Windows , true for paths like :
* ` \ ` , ` \hello ` , ` \\hello\share ` , ` C: ` , and ` C:\hello ` ( and using alternate separator ) .
* /
function hasRoot ( itemPath ) {
assert _1 . default ( itemPath , ` hasRoot parameter 'itemPath' must not be empty ` ) ;
// Normalize separators
itemPath = normalizeSeparators ( itemPath ) ;
// Windows
if ( IS _WINDOWS ) {
// E.g. \ or \hello or \\hello
// E.g. C: or C:\hello
return itemPath . startsWith ( '\\' ) || /^[A-Z]:/i . test ( itemPath ) ;
}
// E.g. /hello
return itemPath . startsWith ( '/' ) ;
}
exports . hasRoot = hasRoot ;
/ * *
* Removes redundant slashes and converts ` / ` to ` \ ` on Windows
* /
function normalizeSeparators ( p ) {
p = p || '' ;
// Windows
if ( IS _WINDOWS ) {
// Convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// Remove redundant slashes
const isUnc = /^\\\\+[^\\]/ . test ( p ) ; // e.g. \\hello
return ( isUnc ? '\\' : '' ) + p . replace ( /\\\\+/g , '\\' ) ; // preserve leading \\ for UNC
}
// Remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
exports . normalizeSeparators = normalizeSeparators ;
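// Illustrative examples (added; character level):
//   normalizeSeparators('//foo//bar/')  // Linux/macOS => '/foo/bar/'
//   normalizeSeparators('//foo//bar/')  // Windows     => '\\foo\bar\'  (leading \\ kept for UNC)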
/ * *
* Normalizes the path separators and trims the trailing separator ( when safe ) .
* For example , ` /foo/ => /foo ` but ` / => / `
* /
function safeTrimTrailingSeparator ( p ) {
// Short-circuit if empty
if ( ! p ) {
return '' ;
}
// Normalize separators
p = normalizeSeparators ( p ) ;
// No trailing slash
if ( ! p . endsWith ( path . sep ) ) {
return p ;
}
// Check '/' on Linux/macOS and '\' on Windows
if ( p === path . sep ) {
return p ;
}
// On Windows check if drive root. E.g. C:\
if ( IS _WINDOWS && /^[A-Z]:\\$/i . test ( p ) ) {
return p ;
}
// Otherwise trim trailing slash
return p . substr ( 0 , p . length - 1 ) ;
}
exports . safeTrimTrailingSeparator = safeTrimTrailingSeparator ;
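// Illustrative examples (added; character level):
//   safeTrimTrailingSeparator('/foo/')  // Linux/macOS => '/foo'
//   safeTrimTrailingSeparator('/')      // Linux/macOS => '/'    (root preserved)
//   safeTrimTrailingSeparator('C:\')    // Windows     => 'C:\'  (drive root preserved)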
//# sourceMappingURL=internal-path-helper.js.map
/***/ } ) ,
/***/ 6836 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . Path = void 0 ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const pathHelper = _ _importStar ( _ _nccwpck _require _ _ ( 1849 ) ) ;
const assert _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Helper class for parsing paths into segments
* /
class Path {
/ * *
* Constructs a Path
* @ param itemPath Path or array of segments
* /
constructor ( itemPath ) {
this . segments = [ ] ;
// String
if ( typeof itemPath === 'string' ) {
assert _1 . default ( itemPath , ` Parameter 'itemPath' must not be empty ` ) ;
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
// Not rooted
if ( ! pathHelper . hasRoot ( itemPath ) ) {
this . segments = itemPath . split ( path . sep ) ;
}
// Rooted
else {
// Add all segments, while not at the root
let remaining = itemPath ;
let dir = pathHelper . dirname ( remaining ) ;
while ( dir !== remaining ) {
// Add the segment
const basename = path . basename ( remaining ) ;
this . segments . unshift ( basename ) ;
// Truncate the last segment
remaining = dir ;
dir = pathHelper . dirname ( remaining ) ;
}
// Remainder is the root
this . segments . unshift ( remaining ) ;
}
}
// Array
else {
// Must not be empty
assert _1 . default ( itemPath . length > 0 , ` Parameter 'itemPath' must not be an empty array ` ) ;
// Each segment
for ( let i = 0 ; i < itemPath . length ; i ++ ) {
let segment = itemPath [ i ] ;
// Must not be empty
assert _1 . default ( segment , ` Parameter 'itemPath' must not contain any empty segments ` ) ;
// Normalize slashes
segment = pathHelper . normalizeSeparators ( itemPath [ i ] ) ;
// Root segment
if ( i === 0 && pathHelper . hasRoot ( segment ) ) {
segment = pathHelper . safeTrimTrailingSeparator ( segment ) ;
assert _1 . default ( segment === pathHelper . dirname ( segment ) , ` Parameter 'itemPath' root segment contains information for multiple segments ` ) ;
this . segments . push ( segment ) ;
}
// All other segments
else {
// Must not contain slash
assert _1 . default ( ! segment . includes ( path . sep ) , ` Parameter 'itemPath' contains unexpected path separators ` ) ;
this . segments . push ( segment ) ;
}
}
}
}
/ * *
* Converts the path to its string representation
* /
toString ( ) {
// First segment
let result = this . segments [ 0 ] ;
// All others
let skipSlash = result . endsWith ( path . sep ) || ( IS _WINDOWS && /^[A-Z]:$/i . test ( result ) ) ;
for ( let i = 1 ; i < this . segments . length ; i ++ ) {
if ( skipSlash ) {
skipSlash = false ;
}
else {
result += path . sep ;
}
result += this . segments [ i ] ;
}
return result ;
}
}
exports . Path = Path ;
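// Illustrative examples (added; Linux/macOS separators assumed):
//   new Path('/foo/bar//baz/').segments       // => ['/', 'foo', 'bar', 'baz']
//   new Path(['/', 'foo', 'bar']).toString()  // => '/foo/bar'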
//# sourceMappingURL=internal-path.js.map
/***/ } ) ,
/***/ 9005 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . partialMatch = exports . match = exports . getSearchPaths = void 0 ;
const pathHelper = _ _importStar ( _ _nccwpck _require _ _ ( 1849 ) ) ;
const internal _match _kind _1 = _ _nccwpck _require _ _ ( 1063 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Given an array of patterns , returns an array of paths to search .
* Duplicates and paths under other included paths are filtered out .
* /
function getSearchPaths ( patterns ) {
// Ignore negate patterns
patterns = patterns . filter ( x => ! x . negate ) ;
// Create a map of all search paths
const searchPathMap = { } ;
for ( const pattern of patterns ) {
const key = IS _WINDOWS
? pattern . searchPath . toUpperCase ( )
: pattern . searchPath ;
searchPathMap [ key ] = 'candidate' ;
}
const result = [ ] ;
for ( const pattern of patterns ) {
// Check if already included
const key = IS _WINDOWS
? pattern . searchPath . toUpperCase ( )
: pattern . searchPath ;
if ( searchPathMap [ key ] === 'included' ) {
continue ;
}
// Check for an ancestor search path
let foundAncestor = false ;
let tempKey = key ;
let parent = pathHelper . dirname ( tempKey ) ;
while ( parent !== tempKey ) {
if ( searchPathMap [ parent ] ) {
foundAncestor = true ;
break ;
}
tempKey = parent ;
parent = pathHelper . dirname ( tempKey ) ;
}
// Include the search pattern in the result
if ( ! foundAncestor ) {
result . push ( pattern . searchPath ) ;
searchPathMap [ key ] = 'included' ;
}
}
return result ;
}
exports . getSearchPaths = getSearchPaths ;
/ * *
* Matches the patterns against the path
* /
function match ( patterns , itemPath ) {
let result = internal _match _kind _1 . MatchKind . None ;
for ( const pattern of patterns ) {
if ( pattern . negate ) {
result &= ~ pattern . match ( itemPath ) ;
}
else {
result |= pattern . match ( itemPath ) ;
}
}
return result ;
}
exports . match = match ;
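// Illustrative note (added): results combine as bit flags. With patterns built from
// ['/src/**', '!/src/vendor/**'], a path under /src/vendor first ORs in MatchKind.All
// from the include pattern, then the negate pattern clears those bits (result &= ~All),
// leaving MatchKind.None.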
/ * *
* Checks whether to descend further into the directory
* /
function partialMatch ( patterns , itemPath ) {
return patterns . some ( x => ! x . negate && x . partialMatch ( itemPath ) ) ;
}
exports . partialMatch = partialMatch ;
//# sourceMappingURL=internal-pattern-helper.js.map
/***/ } ) ,
/***/ 4536 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . Pattern = void 0 ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const pathHelper = _ _importStar ( _ _nccwpck _require _ _ ( 1849 ) ) ;
const assert _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const minimatch _1 = _ _nccwpck _require _ _ ( 3973 ) ;
const internal _match _kind _1 = _ _nccwpck _require _ _ ( 1063 ) ;
const internal _path _1 = _ _nccwpck _require _ _ ( 6836 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
class Pattern {
constructor ( patternOrNegate , isImplicitPattern = false , segments , homedir ) {
/ * *
* Indicates whether matches should be excluded from the result set
* /
this . negate = false ;
// Pattern overload
let pattern ;
if ( typeof patternOrNegate === 'string' ) {
pattern = patternOrNegate . trim ( ) ;
}
// Segments overload
else {
// Convert to pattern
segments = segments || [ ] ;
assert _1 . default ( segments . length , ` Parameter 'segments' must not be empty ` ) ;
const root = Pattern . getLiteral ( segments [ 0 ] ) ;
assert _1 . default ( root && pathHelper . hasAbsoluteRoot ( root ) , ` Parameter 'segments' first element must be a root path ` ) ;
pattern = new internal _path _1 . Path ( segments ) . toString ( ) . trim ( ) ;
if ( patternOrNegate ) {
pattern = ` ! ${ pattern } ` ;
}
}
// Negate
while ( pattern . startsWith ( '!' ) ) {
this . negate = ! this . negate ;
pattern = pattern . substr ( 1 ) . trim ( ) ;
}
// Normalize slashes and ensures absolute root
pattern = Pattern . fixupPattern ( pattern , homedir ) ;
// Segments
this . segments = new internal _path _1 . Path ( pattern ) . segments ;
// Trailing slash indicates the pattern should only match directories, not regular files
this . trailingSeparator = pathHelper
. normalizeSeparators ( pattern )
. endsWith ( path . sep ) ;
pattern = pathHelper . safeTrimTrailingSeparator ( pattern ) ;
// Search path (literal path prior to the first glob segment)
let foundGlob = false ;
const searchSegments = this . segments
. map ( x => Pattern . getLiteral ( x ) )
. filter ( x => ! foundGlob && ! ( foundGlob = x === '' ) ) ;
this . searchPath = new internal _path _1 . Path ( searchSegments ) . toString ( ) ;
// Root RegExp (required when determining partial match)
this . rootRegExp = new RegExp ( Pattern . regExpEscape ( searchSegments [ 0 ] ) , IS _WINDOWS ? 'i' : '' ) ;
this . isImplicitPattern = isImplicitPattern ;
// Create minimatch
const minimatchOptions = {
dot : true ,
nobrace : true ,
nocase : IS _WINDOWS ,
nocomment : true ,
noext : true ,
nonegate : true
} ;
pattern = IS _WINDOWS ? pattern . replace ( /\\/g , '/' ) : pattern ;
this . minimatch = new minimatch _1 . Minimatch ( pattern , minimatchOptions ) ;
}
/ * *
* Matches the pattern against the specified path
* /
match ( itemPath ) {
// Last segment is globstar?
if ( this . segments [ this . segments . length - 1 ] === '**' ) {
// Normalize slashes
itemPath = pathHelper . normalizeSeparators ( itemPath ) ;
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
if ( ! itemPath . endsWith ( path . sep ) && this . isImplicitPattern === false ) {
// Note, this is safe because the constructor ensures the pattern has an absolute root.
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
itemPath = ` ${ itemPath } ${ path . sep } ` ;
}
}
else {
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
}
// Match
if ( this . minimatch . match ( itemPath ) ) {
return this . trailingSeparator ? internal _match _kind _1 . MatchKind . Directory : internal _match _kind _1 . MatchKind . All ;
}
return internal _match _kind _1 . MatchKind . None ;
}
/ * *
* Indicates whether the pattern may match descendants of the specified path
* /
partialMatch ( itemPath ) {
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
// matchOne does not handle root path correctly
if ( pathHelper . dirname ( itemPath ) === itemPath ) {
return this . rootRegExp . test ( itemPath ) ;
}
return this . minimatch . matchOne ( itemPath . split ( IS _WINDOWS ? /\\+/ : /\/+/ ) , this . minimatch . set [ 0 ] , true ) ;
}
/ * *
* Escapes glob patterns within a path
* /
static globEscape ( s ) {
return ( IS _WINDOWS ? s : s . replace ( /\\/g , '\\\\' ) ) // escape '\' on Linux/macOS
. replace ( /(\[)(?=[^/]+\])/g , '[[]' ) // escape '[' when ']' follows within the path segment
. replace ( /\?/g , '[?]' ) // escape '?'
. replace ( /\*/g , '[*]' ) ; // escape '*'
}
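// Illustrative example (added; character level). On Linux/macOS:
//   a\b[c]d*e?  =>  a\\b[[]c]d[*]e[?]
// On Windows the backslash is the path separator and is left alone:
//   a[c]d*e?    =>  a[[]c]d[*]e[?]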
/ * *
* Normalizes slashes and ensures absolute root
* /
static fixupPattern ( pattern , homedir ) {
// Empty
assert _1 . default ( pattern , 'pattern cannot be empty' ) ;
// Must not contain `.` segment, unless first segment
// Must not contain `..` segment
const literalSegments = new internal _path _1 . Path ( pattern ) . segments . map ( x => Pattern . getLiteral ( x ) ) ;
assert _1 . default ( literalSegments . every ( ( x , i ) => ( x !== '.' || i === 0 ) && x !== '..' ) , ` Invalid pattern ' ${ pattern } '. Relative pathing '.' and '..' is not allowed. ` ) ;
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
assert _1 . default ( ! pathHelper . hasRoot ( pattern ) || literalSegments [ 0 ] , ` Invalid pattern ' ${ pattern } '. Root segment must not contain globs. ` ) ;
// Normalize slashes
pattern = pathHelper . normalizeSeparators ( pattern ) ;
// Replace leading `.` segment
if ( pattern === '.' || pattern . startsWith ( ` . ${ path . sep } ` ) ) {
pattern = Pattern . globEscape ( process . cwd ( ) ) + pattern . substr ( 1 ) ;
}
// Replace leading `~` segment
else if ( pattern === '~' || pattern . startsWith ( ` ~ ${ path . sep } ` ) ) {
homedir = homedir || os . homedir ( ) ;
assert _1 . default ( homedir , 'Unable to determine HOME directory' ) ;
assert _1 . default ( pathHelper . hasAbsoluteRoot ( homedir ) , ` Expected HOME directory to be a rooted path. Actual ' ${ homedir } ' ` ) ;
pattern = Pattern . globEscape ( homedir ) + pattern . substr ( 1 ) ;
}
// Replace relative drive root, e.g. pattern is C: or C:foo
else if ( IS _WINDOWS &&
( pattern . match ( /^[A-Z]:$/i ) || pattern . match ( /^[A-Z]:[^\\]/i ) ) ) {
let root = pathHelper . ensureAbsoluteRoot ( 'C:\\dummy-root' , pattern . substr ( 0 , 2 ) ) ;
if ( pattern . length > 2 && ! root . endsWith ( '\\' ) ) {
root += '\\' ;
}
pattern = Pattern . globEscape ( root ) + pattern . substr ( 2 ) ;
}
// Replace relative root, e.g. pattern is \ or \foo
else if ( IS _WINDOWS && ( pattern === '\\' || pattern . match ( /^\\[^\\]/ ) ) ) {
let root = pathHelper . ensureAbsoluteRoot ( 'C:\\dummy-root' , '\\' ) ;
if ( ! root . endsWith ( '\\' ) ) {
root += '\\' ;
}
pattern = Pattern . globEscape ( root ) + pattern . substr ( 1 ) ;
}
// Otherwise ensure absolute root
else {
pattern = pathHelper . ensureAbsoluteRoot ( Pattern . globEscape ( process . cwd ( ) ) , pattern ) ;
}
return pathHelper . normalizeSeparators ( pattern ) ;
}
/ * *
* Attempts to unescape a pattern segment to create a literal path segment .
* Otherwise returns empty string .
* /
static getLiteral ( segment ) {
let literal = '' ;
for ( let i = 0 ; i < segment . length ; i ++ ) {
const c = segment [ i ] ;
// Escape
if ( c === '\\' && ! IS _WINDOWS && i + 1 < segment . length ) {
literal += segment [ ++ i ] ;
continue ;
}
// Wildcard
else if ( c === '*' || c === '?' ) {
return '' ;
}
// Character set
else if ( c === '[' && i + 1 < segment . length ) {
let set = '' ;
let closed = - 1 ;
for ( let i2 = i + 1 ; i2 < segment . length ; i2 ++ ) {
const c2 = segment [ i2 ] ;
// Escape
if ( c2 === '\\' && ! IS _WINDOWS && i2 + 1 < segment . length ) {
set += segment [ ++ i2 ] ;
continue ;
}
// Closed
else if ( c2 === ']' ) {
closed = i2 ;
break ;
}
// Otherwise
else {
set += c2 ;
}
}
// Closed?
if ( closed >= 0 ) {
// Cannot convert
if ( set . length > 1 ) {
return '' ;
}
// Convert to literal
if ( set ) {
literal += set ;
i = closed ;
continue ;
}
}
// Otherwise fall thru
}
// Append
literal += c ;
}
return literal ;
}
/ * *
* Escapes regexp special characters
* https://javascript.info/regexp-escaping
* /
static regExpEscape ( s ) {
return s . replace ( /[[\\^$.|?*+()]/g , '\\$&' ) ;
}
}
exports . Pattern = Pattern ;
//# sourceMappingURL=internal-pattern.js.map
/***/ } ) ,
/***/ 5526 :
/***/ ( function ( _ _unused _webpack _module , exports ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . PersonalAccessTokenCredentialHandler = exports . BearerCredentialHandler = exports . BasicCredentialHandler = void 0 ;
class BasicCredentialHandler {
constructor ( username , password ) {
this . username = username ;
this . password = password ;
}
prepareRequest ( options ) {
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Basic ${ Buffer . from ( ` ${ this . username } : ${ this . password } ` ) . toString ( 'base64' ) } ` ;
}
// This handler cannot handle 401
canHandleAuthentication ( ) {
return false ;
}
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
}
}
exports . BasicCredentialHandler = BasicCredentialHandler ;
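// Illustrative example (added): new BasicCredentialHandler('user', 'pass') sets
//   Authorization: Basic dXNlcjpwYXNz
// on every request, i.e. the base64 encoding of 'user:pass'.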
class BearerCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Bearer ${ this . token } ` ;
}
// This handler cannot handle 401
canHandleAuthentication ( ) {
return false ;
}
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
}
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
class PersonalAccessTokenCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Basic ${ Buffer . from ( ` PAT: ${ this . token } ` ) . toString ( 'base64' ) } ` ;
}
// This handler cannot handle 401
canHandleAuthentication ( ) {
return false ;
}
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
}
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
//# sourceMappingURL=auth.js.map
/***/ } ) ,
/***/ 6255 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
/* eslint-disable @typescript-eslint/no-explicit-any */
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . HttpClient = exports . isHttps = exports . HttpClientResponse = exports . HttpClientError = exports . getProxyUrl = exports . MediaTypes = exports . Headers = exports . HttpCodes = void 0 ;
const http = _ _importStar ( _ _nccwpck _require _ _ ( 3685 ) ) ;
const https = _ _importStar ( _ _nccwpck _require _ _ ( 5687 ) ) ;
const pm = _ _importStar ( _ _nccwpck _require _ _ ( 9835 ) ) ;
const tunnel = _ _importStar ( _ _nccwpck _require _ _ ( 4294 ) ) ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/ * *
* Returns the proxy URL , depending upon the supplied url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https://api.github.com
* /
function getProxyUrl ( serverUrl ) {
const proxyUrl = pm . getProxyUrl ( new URL ( serverUrl ) ) ;
return proxyUrl ? proxyUrl . href : '' ;
}
exports . getProxyUrl = getProxyUrl ;
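// Illustrative example (added; relies on the proxy environment variables read by the
// proxy module later in this bundle): with https_proxy=http://127.0.0.1:8080 set,
//   getProxyUrl('https://api.github.com')  // => 'http://127.0.0.1:8080/'
// and an empty string is returned when no proxy variable applies or the host is bypassed.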
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
class HttpClientError extends Error {
constructor ( message , statusCode ) {
super ( message ) ;
this . name = 'HttpClientError' ;
this . statusCode = statusCode ;
Object . setPrototypeOf ( this , HttpClientError . prototype ) ;
}
}
exports . HttpClientError = HttpClientError ;
class HttpClientResponse {
constructor ( message ) {
this . message = message ;
}
readBody ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let output = Buffer . alloc ( 0 ) ;
this . message . on ( 'data' , ( chunk ) => {
output = Buffer . concat ( [ output , chunk ] ) ;
} ) ;
this . message . on ( 'end' , ( ) => {
resolve ( output . toString ( ) ) ;
} ) ;
} ) ) ;
} ) ;
}
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
const parsedUrl = new URL ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
options ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
get ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
del ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
head ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
} ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that not found resolves with a null result . Other errors ( 4xx , 5xx ) reject the promise
* /
getJson ( requestUrl , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
const res = yield this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
postJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
putJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
patchJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
request ( verb , requestUrl , data , headers ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
}
const parsedUrl = new URL ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
const maxTries = this . _allowRetries && RetryableHttpVerbs . includes ( verb )
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
do {
response = yield this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( const handler of this . handlers ) {
if ( handler . canHandleAuthentication ( response ) ) {
authenticationHandler = handler ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( response . message . statusCode &&
HttpRedirectCodes . includes ( response . message . statusCode ) &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
const parsedRedirectUrl = new URL ( redirectUrl ) ;
if ( parsedUrl . protocol === 'https:' &&
parsedUrl . protocol !== parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning it ,
// otherwise the open socket would leak .
yield response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( const header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
}
}
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = yield this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( ! response . message . statusCode ||
! HttpResponseRetryCodes . includes ( response . message . statusCode ) ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
yield response . readBody ( ) ;
yield this . _performExponentialBackoff ( numTries ) ;
}
} while ( numTries < maxTries ) ;
return response ;
} ) ;
}
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve , reject ) => {
function callbackForResult ( err , res ) {
if ( err ) {
reject ( err ) ;
}
else if ( ! res ) {
// If `err` is not passed, then `res` must be passed.
reject ( new Error ( 'Unknown error' ) ) ;
}
else {
resolve ( res ) ;
}
}
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
} ) ;
}
/ * *
* Raw request with callback .
* @ param info
* @ param data
* @ param onResult
* /
requestRawWithCallback ( info , data , onResult ) {
if ( typeof data === 'string' ) {
if ( ! info . options . headers ) {
info . options . headers = { } ;
}
info . options . headers [ 'Content-Length' ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
function handleResult ( err , res ) {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
}
}
const req = info . httpModule . request ( info . options , ( msg ) => {
const res = new HttpClientResponse ( msg ) ;
handleResult ( undefined , res ) ;
} ) ;
let socket ;
req . on ( 'socket' , sock => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
}
handleResult ( new Error ( ` Request timeout: ${ info . options . path } ` ) ) ;
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err ) ;
} ) ;
if ( data && typeof data === 'string' ) {
req . write ( data , 'utf8' ) ;
}
if ( data && typeof data !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
}
else {
req . end ( ) ;
}
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https://api.github.com
* /
getAgent ( serverUrl ) {
const parsedUrl = new URL ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
for ( const handler of this . handlers ) {
handler . prepareRequest ( info . options ) ;
}
}
return info ;
}
_mergeHeaders ( headers ) {
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers || { } ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
const proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
const useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
// This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
if ( proxyUrl && proxyUrl . hostname ) {
const agentOptions = {
maxSockets ,
keepAlive : this . _keepAlive ,
proxy : Object . assign ( Object . assign ( { } , ( ( proxyUrl . username || proxyUrl . password ) && {
proxyAuth : ` ${ proxyUrl . username } : ${ proxyUrl . password } `
} ) ) , { host : proxyUrl . hostname , port : proxyUrl . port } )
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
}
else {
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
}
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , {
rejectUnauthorized : false
} ) ;
}
return agent ;
}
_performExponentialBackoff ( retryNumber ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
} ) ;
}
_processResponse ( res , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve , reject ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const statusCode = res . message . statusCode || 0 ;
const response = {
statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode === HttpCodes . NotFound ) {
resolve ( response ) ;
}
// get the result from the body
function dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
const a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
let obj ;
let contents ;
try {
contents = yield res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
}
response . headers = res . message . headers ;
}
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = ` Failed request: ( ${ statusCode } ) ` ;
}
const err = new HttpClientError ( msg , statusCode ) ;
err . result = response . result ;
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ) ;
} ) ;
}
}
exports . HttpClient = HttpClient ;
const lowercaseKeys = ( obj ) => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 9835 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . checkBypass = exports . getProxyUrl = void 0 ;
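// Resolves the proxy URL for a request from https_proxy/HTTPS_PROXY or
// http_proxy/HTTP_PROXY (matching the request protocol), unless the host is bypassed.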
function getProxyUrl ( reqUrl ) {
const usingSsl = reqUrl . protocol === 'https:' ;
if ( checkBypass ( reqUrl ) ) {
return undefined ;
}
const proxyVar = ( ( ) => {
if ( usingSsl ) {
return process . env [ 'https_proxy' ] || process . env [ 'HTTPS_PROXY' ] ;
}
else {
return process . env [ 'http_proxy' ] || process . env [ 'HTTP_PROXY' ] ;
}
} ) ( ) ;
if ( proxyVar ) {
return new URL ( proxyVar ) ;
}
else {
return undefined ;
}
}
exports . getProxyUrl = getProxyUrl ;
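// True when the request host should bypass the proxy: loopback addresses always bypass;
// otherwise the host (and host:port) is compared against the comma-separated
// no_proxy/NO_PROXY list, where '*' matches everything and entries match exact hosts
// or domain suffixes.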
function checkBypass ( reqUrl ) {
if ( ! reqUrl . hostname ) {
return false ;
}
const reqHost = reqUrl . hostname ;
if ( isLoopbackAddress ( reqHost ) ) {
return true ;
}
const noProxy = process . env [ 'no_proxy' ] || process . env [ 'NO_PROXY' ] || '' ;
if ( ! noProxy ) {
return false ;
}
// Determine the request port
let reqPort ;
if ( reqUrl . port ) {
reqPort = Number ( reqUrl . port ) ;
}
else if ( reqUrl . protocol === 'http:' ) {
reqPort = 80 ;
}
else if ( reqUrl . protocol === 'https:' ) {
reqPort = 443 ;
}
// Format the request hostname and hostname with port
const upperReqHosts = [ reqUrl . hostname . toUpperCase ( ) ] ;
if ( typeof reqPort === 'number' ) {
upperReqHosts . push ( ` ${ upperReqHosts [ 0 ] } : ${ reqPort } ` ) ;
}
// Compare request host against noproxy
for ( const upperNoProxyItem of noProxy
. split ( ',' )
. map ( x => x . trim ( ) . toUpperCase ( ) )
. filter ( x => x ) ) {
if ( upperNoProxyItem === '*' ||
upperReqHosts . some ( x => x === upperNoProxyItem ||
x . endsWith ( ` . ${ upperNoProxyItem } ` ) ||
( upperNoProxyItem . startsWith ( '.' ) &&
x . endsWith ( ` ${ upperNoProxyItem } ` ) ) ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
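// Matches localhost, 127.x.x.x and the bracketed IPv6 loopback notations.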
function isLoopbackAddress ( host ) {
const hostLower = host . toLowerCase ( ) ;
return ( hostLower === 'localhost' ||
hostLower . startsWith ( '127.' ) ||
hostLower . startsWith ( '[::1]' ) ||
hostLower . startsWith ( '[0:0:0:0:0:0:0:1]' ) ) ;
}
//# sourceMappingURL=proxy.js.map
/***/ } ) ,
/***/ 1962 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _a ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getCmdPath = exports . tryGetExecutablePath = exports . isRooted = exports . isDirectory = exports . exists = exports . READONLY = exports . UV _FS _O _EXLOCK = exports . IS _WINDOWS = exports . unlink = exports . symlink = exports . stat = exports . rmdir = exports . rm = exports . rename = exports . readlink = exports . readdir = exports . open = exports . mkdir = exports . lstat = exports . copyFile = exports . chmod = void 0 ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
_a = fs . promises
// export const {open} = 'fs'
, exports . chmod = _a . chmod , exports . copyFile = _a . copyFile , exports . lstat = _a . lstat , exports . mkdir = _a . mkdir , exports . open = _a . open , exports . readdir = _a . readdir , exports . readlink = _a . readlink , exports . rename = _a . rename , exports . rm = _a . rm , exports . rmdir = _a . rmdir , exports . stat = _a . stat , exports . symlink = _a . symlink , exports . unlink = _a . unlink ;
exports . IS _WINDOWS = process . platform === 'win32' ;
// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691
exports . UV _FS _O _EXLOCK = 0x10000000 ;
exports . READONLY = fs . constants . O _RDONLY ;
function exists ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield exports . stat ( fsPath ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
return false ;
}
throw err ;
}
return true ;
} ) ;
}
exports . exists = exists ;
function isDirectory ( fsPath , useStat = false ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const stats = useStat ? yield exports . stat ( fsPath ) : yield exports . lstat ( fsPath ) ;
return stats . isDirectory ( ) ;
} ) ;
}
exports . isDirectory = isDirectory ;
/**
 * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
 * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
 */
function isRooted ( p ) {
p = normalizeSeparators ( p ) ;
if ( ! p ) {
throw new Error ( 'isRooted() parameter "p" cannot be empty' ) ;
}
if ( exports . IS _WINDOWS ) {
return ( p . startsWith ( '\\' ) || /^[A-Z]:/i . test ( p ) // e.g. \ or \hello or \\hello
) ; // e.g. C: or C:\hello
}
return p . startsWith ( '/' ) ;
}
exports . isRooted = isRooted ;
/**
 * Best effort attempt to determine whether a file exists and is executable.
 * @param filePath    file path to check
 * @param extensions  additional file extensions to try
 * @return if file exists and is executable, returns the file path. otherwise empty string.
 */
function tryGetExecutablePath ( filePath , extensions ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stats = undefined ;
try {
// test file exists
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// on Windows, test for valid extension
const upperExt = path . extname ( filePath ) . toUpperCase ( ) ;
if ( extensions . some ( validExt => validExt . toUpperCase ( ) === upperExt ) ) {
return filePath ;
}
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
// try each extension
const originalFilePath = filePath ;
for ( const extension of extensions ) {
filePath = originalFilePath + extension ;
stats = undefined ;
try {
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// preserve the case of the actual file (since an extension was appended)
try {
const directory = path . dirname ( filePath ) ;
const upperName = path . basename ( filePath ) . toUpperCase ( ) ;
for ( const actualName of yield exports . readdir ( directory ) ) {
if ( upperName === actualName . toUpperCase ( ) ) {
filePath = path . join ( directory , actualName ) ;
break ;
}
}
}
catch ( err ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine the actual case of the file ' ${ filePath } ': ${ err } ` ) ;
}
return filePath ;
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
}
return '' ;
} ) ;
}
exports . tryGetExecutablePath = tryGetExecutablePath ;
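// On Windows, converts forward slashes to backslashes and collapses repeated separators;
// elsewhere just collapses repeated forward slashes.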
function normalizeSeparators ( p ) {
p = p || '' ;
if ( exports . IS _WINDOWS ) {
// convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// remove redundant slashes
return p . replace ( /\\\\+/g , '\\' ) ;
}
// remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
function isUnixExecutable ( stats ) {
return ( ( stats . mode & 1 ) > 0 ||
( ( stats . mode & 8 ) > 0 && stats . gid === process . getgid ( ) ) ||
( ( stats . mode & 64 ) > 0 && stats . uid === process . getuid ( ) ) ) ;
}
// Get the path of cmd.exe in windows
function getCmdPath ( ) {
var _a ;
return ( _a = process . env [ 'COMSPEC' ] ) !== null && _a !== void 0 ? _a : ` cmd.exe ` ;
}
exports . getCmdPath = getCmdPath ;
//# sourceMappingURL=io-util.js.map
/***/ } ) ,
/***/ 7436 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . findInPath = exports . which = exports . mkdirP = exports . rmRF = exports . mv = exports . cp = void 0 ;
const assert _1 = _ _nccwpck _require _ _ ( 9491 ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const ioUtil = _ _importStar ( _ _nccwpck _require _ _ ( 1962 ) ) ;
/**
 * Copies a file or folder.
 * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
 *
 * @param source    source path
 * @param dest      destination path
 * @param options   optional. See CopyOptions.
 */
function cp ( source , dest , options = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const { force , recursive , copySourceDirectory } = readCopyOptions ( options ) ;
const destStat = ( yield ioUtil . exists ( dest ) ) ? yield ioUtil . stat ( dest ) : null ;
// Dest is an existing file, but not forcing
if ( destStat && destStat . isFile ( ) && ! force ) {
return ;
}
// If dest is an existing directory, should copy inside.
const newDest = destStat && destStat . isDirectory ( ) && copySourceDirectory
? path . join ( dest , path . basename ( source ) )
: dest ;
if ( ! ( yield ioUtil . exists ( source ) ) ) {
throw new Error ( ` no such file or directory: ${ source } ` ) ;
}
const sourceStat = yield ioUtil . stat ( source ) ;
if ( sourceStat . isDirectory ( ) ) {
if ( ! recursive ) {
throw new Error ( ` Failed to copy. ${ source } is a directory, but tried to copy without recursive flag. ` ) ;
}
else {
yield cpDirRecursive ( source , newDest , 0 , force ) ;
}
}
else {
if ( path . relative ( source , newDest ) === '' ) {
// a file cannot be copied to itself
throw new Error ( ` ' ${ newDest } ' and ' ${ source } ' are the same file ` ) ;
}
yield copyFile ( source , newDest , force ) ;
}
} ) ;
}
exports . cp = cp ;
/**
 * Moves a path.
 *
 * @param source    source path
 * @param dest      destination path
 * @param options   optional. See MoveOptions.
 */
function mv ( source , dest , options = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( yield ioUtil . exists ( dest ) ) {
let destExists = true ;
if ( yield ioUtil . isDirectory ( dest ) ) {
// If dest is directory copy src into dest
dest = path . join ( dest , path . basename ( source ) ) ;
destExists = yield ioUtil . exists ( dest ) ;
}
if ( destExists ) {
if ( options . force == null || options . force ) {
yield rmRF ( dest ) ;
}
else {
throw new Error ( 'Destination already exists' ) ;
}
}
}
yield mkdirP ( path . dirname ( dest ) ) ;
yield ioUtil . rename ( source , dest ) ;
} ) ;
}
exports . mv = mv ;
/**
 * Remove a path recursively with force
 *
 * @param inputPath path to remove
 */
function rmRF ( inputPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ioUtil . IS _WINDOWS ) {
// Check for invalid characters
// https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
if ( /[*"<>|]/ . test ( inputPath ) ) {
throw new Error ( 'File path must not contain `*`, `"`, `<`, `>` or `|` on Windows' ) ;
}
}
try {
// note if path does not exist, error is silent
yield ioUtil . rm ( inputPath , {
force : true ,
maxRetries : 3 ,
recursive : true ,
retryDelay : 300
} ) ;
}
catch ( err ) {
throw new Error ( ` File was unable to be removed ${ err } ` ) ;
}
} ) ;
}
exports . rmRF = rmRF ;
/**
 * Make a directory. Creates the full path with folders in between.
 * Will throw if it fails.
 *
 * @param fsPath path to create
 * @returns Promise<void>
 */
function mkdirP ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( fsPath , 'a path argument must be provided' ) ;
yield ioUtil . mkdir ( fsPath , { recursive : true } ) ;
} ) ;
}
exports . mkdirP = mkdirP ;
/**
 * Returns path of a tool had the tool actually been invoked. Resolves via paths.
 * If you check and the tool does not exist, it will throw.
 *
 * @param tool  name of the tool
 * @param check whether to check if tool exists
 * @returns Promise<string> path to tool
 */
function which ( tool , check ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! tool ) {
throw new Error ( "parameter 'tool' is required" ) ;
}
// recursive when check=true
if ( check ) {
const result = yield which ( tool , false ) ;
if ( ! result ) {
if ( ioUtil . IS _WINDOWS ) {
throw new Error ( ` Unable to locate executable file: ${ tool } . Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file. ` ) ;
}
else {
throw new Error ( ` Unable to locate executable file: ${ tool } . Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable. ` ) ;
}
}
return result ;
}
const matches = yield findInPath ( tool ) ;
if ( matches && matches . length > 0 ) {
return matches [ 0 ] ;
}
return '' ;
} ) ;
}
exports . which = which ;
/**
 * Returns a list of all occurrences of the given tool on the system path.
 *
 * @returns Promise<string[]> the paths of the tool
 */
function findInPath ( tool ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! tool ) {
throw new Error ( "parameter 'tool' is required" ) ;
}
// build the list of extensions to try
const extensions = [ ] ;
if ( ioUtil . IS _WINDOWS && process . env [ 'PATHEXT' ] ) {
for ( const extension of process . env [ 'PATHEXT' ] . split ( path . delimiter ) ) {
if ( extension ) {
extensions . push ( extension ) ;
}
}
}
// if it's rooted, return it if exists. otherwise return empty.
if ( ioUtil . isRooted ( tool ) ) {
const filePath = yield ioUtil . tryGetExecutablePath ( tool , extensions ) ;
if ( filePath ) {
return [ filePath ] ;
}
return [ ] ;
}
// if any path separators, return empty
if ( tool . includes ( path . sep ) ) {
return [ ] ;
}
// build the list of directories
//
// Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
// it feels like we should not do this. Checking the current directory seems like more of a use
// case of a shell, and the which() function exposed by the toolkit should strive for consistency
// across platforms.
const directories = [ ] ;
if ( process . env . PATH ) {
for ( const p of process . env . PATH . split ( path . delimiter ) ) {
if ( p ) {
directories . push ( p ) ;
}
}
}
// find all matches
const matches = [ ] ;
for ( const directory of directories ) {
const filePath = yield ioUtil . tryGetExecutablePath ( path . join ( directory , tool ) , extensions ) ;
if ( filePath ) {
matches . push ( filePath ) ;
}
}
return matches ;
} ) ;
}
exports . findInPath = findInPath ;
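// Normalizes CopyOptions: force defaults to true; recursive and copySourceDirectory
// are coerced to booleans (copySourceDirectory also defaults to true).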
function readCopyOptions ( options ) {
const force = options . force == null ? true : options . force ;
const recursive = Boolean ( options . recursive ) ;
const copySourceDirectory = options . copySourceDirectory == null
? true
: Boolean ( options . copySourceDirectory ) ;
return { force , recursive , copySourceDirectory } ;
}
function cpDirRecursive ( sourceDir , destDir , currentDepth , force ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Ensure there is not a run away recursive copy
if ( currentDepth >= 255 )
return ;
currentDepth ++ ;
yield mkdirP ( destDir ) ;
const files = yield ioUtil . readdir ( sourceDir ) ;
for ( const fileName of files ) {
const srcFile = ` ${ sourceDir } / ${ fileName } ` ;
const destFile = ` ${ destDir } / ${ fileName } ` ;
const srcFileStat = yield ioUtil . lstat ( srcFile ) ;
if ( srcFileStat . isDirectory ( ) ) {
// Recurse
yield cpDirRecursive ( srcFile , destFile , currentDepth , force ) ;
}
else {
yield copyFile ( srcFile , destFile , force ) ;
}
}
// Change the mode for the newly created directory
yield ioUtil . chmod ( destDir , ( yield ioUtil . stat ( sourceDir ) ) . mode ) ;
} ) ;
}
// Copies a single file; symlinks are re-created at the destination rather than followed
function copyFile ( srcFile , destFile , force ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ( yield ioUtil . lstat ( srcFile ) ) . isSymbolicLink ( ) ) {
// unlink/re-link it
try {
yield ioUtil . lstat ( destFile ) ;
yield ioUtil . unlink ( destFile ) ;
}
catch ( e ) {
// Try to override file permission
if ( e . code === 'EPERM' ) {
yield ioUtil . chmod ( destFile , '0666' ) ;
yield ioUtil . unlink ( destFile ) ;
}
// other errors = it doesn't exist, no work to do
}
// Copy over symlink
const symlinkFull = yield ioUtil . readlink ( srcFile ) ;
yield ioUtil . symlink ( symlinkFull , destFile , ioUtil . IS _WINDOWS ? 'junction' : null ) ;
}
else if ( ! ( yield ioUtil . exists ( destFile ) ) || force ) {
yield ioUtil . copyFile ( srcFile , destFile ) ;
}
} ) ;
}
//# sourceMappingURL=io.js.map
/***/ } ) ,
/***/ 9417 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = balanced ;
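// Finds the first balanced pair of `a`...`b` in `str` and returns its location plus the
// surrounding text, e.g. balanced('{', '}', 'pre{in{nested}}post') ->
// { start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }.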
function balanced ( a , b , str ) {
if ( a instanceof RegExp ) a = maybeMatch ( a , str ) ;
if ( b instanceof RegExp ) b = maybeMatch ( b , str ) ;
var r = range ( a , b , str ) ;
return r && {
start : r [ 0 ] ,
end : r [ 1 ] ,
pre : str . slice ( 0 , r [ 0 ] ) ,
body : str . slice ( r [ 0 ] + a . length , r [ 1 ] ) ,
post : str . slice ( r [ 1 ] + b . length )
} ;
}
function maybeMatch ( reg , str ) {
var m = str . match ( reg ) ;
return m ? m [ 0 ] : null ;
}
balanced . range = range ;
function range ( a , b , str ) {
var begs , beg , left , right , result ;
var ai = str . indexOf ( a ) ;
var bi = str . indexOf ( b , ai + 1 ) ;
var i = ai ;
if ( ai >= 0 && bi > 0 ) {
if ( a === b ) {
return [ ai , bi ] ;
}
begs = [ ] ;
left = str . length ;
while ( i >= 0 && ! result ) {
if ( i == ai ) {
begs . push ( i ) ;
ai = str . indexOf ( a , i + 1 ) ;
} else if ( begs . length == 1 ) {
result = [ begs . pop ( ) , bi ] ;
} else {
beg = begs . pop ( ) ;
if ( beg < left ) {
left = beg ;
right = bi ;
}
bi = str . indexOf ( b , i + 1 ) ;
}
i = ai < bi && ai >= 0 ? ai : bi ;
}
if ( begs . length ) {
result = [ left , right ] ;
}
}
return result ;
}
/***/ } ) ,
/***/ 3717 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var concatMap = _ _nccwpck _require _ _ ( 6891 ) ;
var balanced = _ _nccwpck _require _ _ ( 9417 ) ;
module . exports = expandTop ;
var escSlash = '\0SLASH' + Math . random ( ) + '\0' ;
var escOpen = '\0OPEN' + Math . random ( ) + '\0' ;
var escClose = '\0CLOSE' + Math . random ( ) + '\0' ;
var escComma = '\0COMMA' + Math . random ( ) + '\0' ;
var escPeriod = '\0PERIOD' + Math . random ( ) + '\0' ;
function numeric ( str ) {
return parseInt ( str , 10 ) == str
? parseInt ( str , 10 )
: str . charCodeAt ( 0 ) ;
}
function escapeBraces ( str ) {
return str . split ( '\\\\' ) . join ( escSlash )
. split ( '\\{' ) . join ( escOpen )
. split ( '\\}' ) . join ( escClose )
. split ( '\\,' ) . join ( escComma )
. split ( '\\.' ) . join ( escPeriod ) ;
}
function unescapeBraces ( str ) {
return str . split ( escSlash ) . join ( '\\' )
. split ( escOpen ) . join ( '{' )
. split ( escClose ) . join ( '}' )
. split ( escComma ) . join ( ',' )
. split ( escPeriod ) . join ( '.' ) ;
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts ( str ) {
if ( ! str )
return [ '' ] ;
var parts = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m )
return str . split ( ',' ) ;
var pre = m . pre ;
var body = m . body ;
var post = m . post ;
var p = pre . split ( ',' ) ;
p [ p . length - 1 ] += '{' + body + '}' ;
var postParts = parseCommaParts ( post ) ;
if ( post . length ) {
p [ p . length - 1 ] += postParts . shift ( ) ;
p . push . apply ( p , postParts ) ;
}
parts . push . apply ( parts , p ) ;
return parts ;
}
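// Entry point: expands the first level of braces and recurses, e.g.
// expandTop('a{b,c}d') -> ['abd', 'acd'] and expandTop('a{1..3}') -> ['a1', 'a2', 'a3'].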
function expandTop ( str ) {
if ( ! str )
return [ ] ;
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if ( str . substr ( 0 , 2 ) === '{}' ) {
str = '\\{\\}' + str . substr ( 2 ) ;
}
return expand ( escapeBraces ( str ) , true ) . map ( unescapeBraces ) ;
}
function identity ( e ) {
return e ;
}
function embrace ( str ) {
return '{' + str + '}' ;
}
function isPadded ( el ) {
return /^-?0\d/ . test ( el ) ;
}
function lte ( i , y ) {
return i <= y ;
}
function gte ( i , y ) {
return i >= y ;
}
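// Core expansion: handles numeric/alpha sequences like {1..5} or {a..e} (with optional
// step and zero-padding) and comma option sets like {a,b,c}, recursing into nested
// braces and into the remainder after the closing brace.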
function expand ( str , isTop ) {
var expansions = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m || /\$$/ . test ( m . pre ) ) return [ str ] ;
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isSequence = isNumericSequence || isAlphaSequence ;
var isOptions = m . body . indexOf ( ',' ) >= 0 ;
if ( ! isSequence && ! isOptions ) {
// {a},b}
if ( m . post . match ( /,.*\}/ ) ) {
str = m . pre + '{' + m . body + escClose + m . post ;
return expand ( str ) ;
}
return [ str ] ;
}
var n ;
if ( isSequence ) {
n = m . body . split ( /\.\./ ) ;
} else {
n = parseCommaParts ( m . body ) ;
if ( n . length === 1 ) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand ( n [ 0 ] , false ) . map ( embrace ) ;
if ( n . length === 1 ) {
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
return post . map ( function ( p ) {
return m . pre + n [ 0 ] + p ;
} ) ;
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m . pre ;
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
var N ;
if ( isSequence ) {
var x = numeric ( n [ 0 ] ) ;
var y = numeric ( n [ 1 ] ) ;
var width = Math . max ( n [ 0 ] . length , n [ 1 ] . length )
var incr = n . length == 3
? Math . abs ( numeric ( n [ 2 ] ) )
: 1 ;
var test = lte ;
var reverse = y < x ;
if ( reverse ) {
incr *= - 1 ;
test = gte ;
}
var pad = n . some ( isPadded ) ;
N = [ ] ;
for ( var i = x ; test ( i , y ) ; i += incr ) {
var c ;
if ( isAlphaSequence ) {
c = String . fromCharCode ( i ) ;
if ( c === '\\' )
c = '' ;
} else {
c = String ( i ) ;
if ( pad ) {
var need = width - c . length ;
if ( need > 0 ) {
var z = new Array ( need + 1 ) . join ( '0' ) ;
if ( i < 0 )
c = '-' + z + c . slice ( 1 ) ;
else
c = z + c ;
}
}
}
N . push ( c ) ;
}
} else {
N = concatMap ( n , function ( el ) { return expand ( el , false ) } ) ;
}
for ( var j = 0 ; j < N . length ; j ++ ) {
for ( var k = 0 ; k < post . length ; k ++ ) {
var expansion = pre + N [ j ] + post [ k ] ;
if ( ! isTop || isSequence || expansion )
expansions . push ( expansion ) ;
}
}
return expansions ;
}
/***/ } ) ,
/***/ 6891 :
/***/ ( ( module ) => {
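// concat-map: maps fn over xs and flattens any array results one level into the output.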
module . exports = function ( xs , fn ) {
var res = [ ] ;
for ( var i = 0 ; i < xs . length ; i ++ ) {
var x = fn ( xs [ i ] , i ) ;
if ( isArray ( x ) ) res . push . apply ( res , x ) ;
else res . push ( x ) ;
}
return res ;
} ;
var isArray = Array . isArray || function ( xs ) {
return Object . prototype . toString . call ( xs ) === '[object Array]' ;
} ;
/***/ } ) ,
/***/ 3973 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
module . exports = minimatch
minimatch . Minimatch = Minimatch
var path = ( function ( ) { try { return _ _nccwpck _require _ _ ( 1017 ) } catch ( e ) { } } ( ) ) || {
sep : '/'
}
minimatch . sep = path . sep
var GLOBSTAR = minimatch . GLOBSTAR = Minimatch . GLOBSTAR = { }
var expand = _ _nccwpck _require _ _ ( 3717 )
var plTypes = {
'!' : { open : '(?:(?!(?:' , close : '))[^/]*?)' } ,
'?' : { open : '(?:' , close : ')?' } ,
'+' : { open : '(?:' , close : ')+' } ,
'*' : { open : '(?:' , close : ')*' } ,
'@' : { open : '(?:' , close : ')' }
}
// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'
// * => any number of characters
var star = qmark + '*?'
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
// characters that need to be escaped in RegExp.
var reSpecials = charSet ( '().*{}+?[]^$\\!' )
// "abc" -> { a:true, b:true, c:true }
function charSet ( s ) {
return s . split ( '' ) . reduce ( function ( set , c ) {
set [ c ] = true
return set
} , { } )
}
// normalizes slashes.
var slashSplit = /\/+/
minimatch . filter = filter
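// Returns a predicate p => minimatch(p, pattern, options), convenient with Array.prototype.filter.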
function filter ( pattern , options ) {
options = options || { }
return function ( p , i , list ) {
return minimatch ( p , pattern , options )
}
}
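// Shallow-merges two option objects into a new one; keys in b win.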
function ext ( a , b ) {
b = b || { }
var t = { }
Object . keys ( a ) . forEach ( function ( k ) {
t [ k ] = a [ k ]
} )
Object . keys ( b ) . forEach ( function ( k ) {
t [ k ] = b [ k ]
} )
return t
}
minimatch . defaults = function ( def ) {
if ( ! def || typeof def !== 'object' || ! Object . keys ( def ) . length ) {
return minimatch
}
var orig = minimatch
var m = function minimatch ( p , pattern , options ) {
return orig ( p , pattern , ext ( def , options ) )
}
m . Minimatch = function Minimatch ( pattern , options ) {
return new orig . Minimatch ( pattern , ext ( def , options ) )
}
m . Minimatch . defaults = function defaults ( options ) {
return orig . defaults ( ext ( def , options ) ) . Minimatch
}
m . filter = function filter ( pattern , options ) {
return orig . filter ( pattern , ext ( def , options ) )
}
m . defaults = function defaults ( options ) {
return orig . defaults ( ext ( def , options ) )
}
m . makeRe = function makeRe ( pattern , options ) {
return orig . makeRe ( pattern , ext ( def , options ) )
}
m . braceExpand = function braceExpand ( pattern , options ) {
return orig . braceExpand ( pattern , ext ( def , options ) )
}
m . match = function ( list , pattern , options ) {
return orig . match ( list , pattern , ext ( def , options ) )
}
return m
}
Minimatch . defaults = function ( def ) {
return minimatch . defaults ( def ) . Minimatch
}
function minimatch ( p , pattern , options ) {
assertValidPattern ( pattern )
if ( ! options ) options = { }
// shortcut: comments match nothing.
if ( ! options . nocomment && pattern . charAt ( 0 ) === '#' ) {
return false
}
return new Minimatch ( pattern , options ) . match ( p )
}
function Minimatch ( pattern , options ) {
if ( ! ( this instanceof Minimatch ) ) {
return new Minimatch ( pattern , options )
}
assertValidPattern ( pattern )
if ( ! options ) options = { }
pattern = pattern . trim ( )
// windows support: need to use /, not \
if ( ! options . allowWindowsEscape && path . sep !== '/' ) {
pattern = pattern . split ( path . sep ) . join ( '/' )
}
this . options = options
this . set = [ ]
this . pattern = pattern
this . regexp = null
this . negate = false
this . comment = false
this . empty = false
this . partial = ! ! options . partial
// make the set of regexps etc.
this . make ( )
}
Minimatch . prototype . debug = function ( ) { }
Minimatch . prototype . make = make
function make ( ) {
var pattern = this . pattern
var options = this . options
// empty patterns and comments match nothing.
if ( ! options . nocomment && pattern . charAt ( 0 ) === '#' ) {
this . comment = true
return
}
if ( ! pattern ) {
this . empty = true
return
}
// step 1: figure out negation, etc.
this . parseNegate ( )
// step 2: expand braces
var set = this . globSet = this . braceExpand ( )
if ( options . debug ) this . debug = function debug ( ) { console . error . apply ( console , arguments ) }
this . debug ( this . pattern , set )
// step 3: now we have a set, so turn each one into a series of path-portion
// matching patterns.
// These will be regexps, except in the case of "**", which is
// set to the GLOBSTAR object for globstar behavior,
// and will not contain any / characters
set = this . globParts = set . map ( function ( s ) {
return s . split ( slashSplit )
} )
this . debug ( this . pattern , set )
// glob --> regexps
set = set . map ( function ( s , si , set ) {
return s . map ( this . parse , this )
} , this )
this . debug ( this . pattern , set )
// filter out everything that didn't compile properly.
set = set . filter ( function ( s ) {
return s . indexOf ( false ) === - 1
} )
this . debug ( this . pattern , set )
this . set = set
}
Minimatch . prototype . parseNegate = parseNegate
function parseNegate ( ) {
var pattern = this . pattern
var negate = false
var options = this . options
var negateOffset = 0
if ( options . nonegate ) return
for ( var i = 0 , l = pattern . length
; i < l && pattern . charAt ( i ) === '!'
; i ++ ) {
negate = ! negate
negateOffset ++
}
if ( negateOffset ) this . pattern = pattern . substr ( negateOffset )
this . negate = negate
}
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch . braceExpand = function ( pattern , options ) {
return braceExpand ( pattern , options )
}
Minimatch . prototype . braceExpand = braceExpand
function braceExpand ( pattern , options ) {
if ( ! options ) {
if ( this instanceof Minimatch ) {
options = this . options
} else {
options = { }
}
}
pattern = typeof pattern === 'undefined'
? this . pattern : pattern
assertValidPattern ( pattern )
// Thanks to Yeting Li <https://github.com/yetingli> for
// improving this regexp to avoid a ReDOS vulnerability.
if ( options . nobrace || ! /\{(?:(?!\{).)*\}/ . test ( pattern ) ) {
// shortcut. no need to expand.
return [ pattern ]
}
return expand ( pattern )
}
var MAX _PATTERN _LENGTH = 1024 * 64
var assertValidPattern = function ( pattern ) {
if ( typeof pattern !== 'string' ) {
throw new TypeError ( 'invalid pattern' )
}
if ( pattern . length > MAX _PATTERN _LENGTH ) {
throw new TypeError ( 'pattern is too long' )
}
}
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch . prototype . parse = parse
var SUBPARSE = { }
function parse ( pattern , isSub ) {
assertValidPattern ( pattern )
var options = this . options
// shortcuts
if ( pattern === '**' ) {
if ( ! options . noglobstar )
return GLOBSTAR
else
pattern = '*'
}
if ( pattern === '' ) return ''
var re = ''
var hasMagic = ! ! options . nocase
var escaping = false
// ? => one single character
var patternListStack = [ ]
var negativeLists = [ ]
var stateChar
var inClass = false
var reClassStart = - 1
var classStart = - 1
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
var patternStart = pattern . charAt ( 0 ) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options . dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)'
var self = this
function clearStateChar ( ) {
if ( stateChar ) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch ( stateChar ) {
case '*' :
re += star
hasMagic = true
break
case '?' :
re += qmark
hasMagic = true
break
default :
re += '\\' + stateChar
break
}
self . debug ( 'clearStateChar %j %j' , stateChar , re )
stateChar = false
}
}
for ( var i = 0 , len = pattern . length , c
; ( i < len ) && ( c = pattern . charAt ( i ) )
; i ++ ) {
this . debug ( '%s\t%s %s %j' , pattern , i , re , c )
// skip over any that are escaped.
if ( escaping && reSpecials [ c ] ) {
re += '\\' + c
escaping = false
continue
}
switch ( c ) {
/* istanbul ignore next */
case '/' : {
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
}
case '\\' :
clearStateChar ( )
escaping = true
continue
// the various stateChar values
// for the "extglob" stuff.
case '?' :
case '*' :
case '+' :
case '@' :
case '!' :
this . debug ( '%s\t%s %s %j <-- stateChar' , pattern , i , re , c )
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if ( inClass ) {
this . debug ( ' in class' )
if ( c === '!' && i === classStart + 1 ) c = '^'
re += c
continue
}
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
self . debug ( 'call clearStateChar %j' , stateChar )
clearStateChar ( )
stateChar = c
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if ( options . noext ) clearStateChar ( )
continue
case '(' :
if ( inClass ) {
re += '('
continue
}
if ( ! stateChar ) {
re += '\\('
continue
}
patternListStack . push ( {
type : stateChar ,
start : i - 1 ,
reStart : re . length ,
open : plTypes [ stateChar ] . open ,
close : plTypes [ stateChar ] . close
} )
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this . debug ( 'plType %j %j' , stateChar , re )
stateChar = false
continue
case ')' :
if ( inClass || ! patternListStack . length ) {
re += '\\)'
continue
}
clearStateChar ( )
hasMagic = true
var pl = patternListStack . pop ( )
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
re += pl . close
if ( pl . type === '!' ) {
negativeLists . push ( pl )
}
pl . reEnd = re . length
continue
case '|' :
if ( inClass || ! patternListStack . length || escaping ) {
re += '\\|'
escaping = false
continue
}
clearStateChar ( )
re += '|'
continue
// these are mostly the same in regexp and glob
case '[' :
// swallow any state-tracking char before the [
clearStateChar ( )
if ( inClass ) {
re += '\\' + c
continue
}
inClass = true
classStart = i
reClassStart = re . length
re += c
continue
case ']' :
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if ( i === classStart + 1 || ! inClass ) {
re += '\\' + c
escaping = false
continue
}
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern . substring ( classStart + 1 , i )
try {
RegExp ( '[' + cs + ']' )
} catch ( er ) {
// not a valid class!
var sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ] + '\\]'
hasMagic = hasMagic || sp [ 1 ]
inClass = false
continue
}
// finish up the class.
hasMagic = true
inClass = false
re += c
continue
default :
// swallow any state char that wasn't consumed
clearStateChar ( )
if ( escaping ) {
// no need
escaping = false
} else if ( reSpecials [ c ]
&& ! ( c === '^' && inClass ) ) {
re += '\\'
}
re += c
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if ( inClass ) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern . substr ( classStart + 1 )
sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ]
hasMagic = hasMagic || sp [ 1 ]
}
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for ( pl = patternListStack . pop ( ) ; pl ; pl = patternListStack . pop ( ) ) {
var tail = re . slice ( pl . reStart + pl . open . length )
this . debug ( 'setting tail' , re , pl )
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail . replace ( /((?:\\{2}){0,64})(\\?)\|/g , function ( _ , $1 , $2 ) {
if ( ! $2 ) {
// the | isn't already escaped, so escape it.
$2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
} )
this . debug ( 'tail=%j\n %s' , tail , tail , pl , re )
var t = pl . type === '*' ? star
: pl . type === '?' ? qmark
: '\\' + pl . type
hasMagic = true
re = re . slice ( 0 , pl . reStart ) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar ( )
if ( escaping ) {
// trailing \\
re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch ( re . charAt ( 0 ) ) {
case '[' : case '.' : case '(' : addPatternStart = true
}
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for ( var n = negativeLists . length - 1 ; n > - 1 ; n -- ) {
var nl = negativeLists [ n ]
var nlBefore = re . slice ( 0 , nl . reStart )
var nlFirst = re . slice ( nl . reStart , nl . reEnd - 8 )
var nlLast = re . slice ( nl . reEnd - 8 , nl . reEnd )
var nlAfter = re . slice ( nl . reEnd )
nlLast += nlAfter
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
var openParensBefore = nlBefore . split ( '(' ) . length - 1
var cleanAfter = nlAfter
for ( i = 0 ; i < openParensBefore ; i ++ ) {
cleanAfter = cleanAfter . replace ( /\)[+*?]?/ , '' )
}
nlAfter = cleanAfter
var dollar = ''
if ( nlAfter === '' && isSub !== SUBPARSE ) {
dollar = '$'
}
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
re = newRe
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if ( re !== '' && hasMagic ) {
re = '(?=.)' + re
}
if ( addPatternStart ) {
re = patternStart + re
}
// parsing just a piece of a larger pattern.
if ( isSub === SUBPARSE ) {
return [ re , hasMagic ]
}
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if ( ! hasMagic ) {
return globUnescape ( pattern )
}
var flags = options . nocase ? 'i' : ''
try {
var regExp = new RegExp ( '^' + re + '$' , flags )
} catch ( er ) /* istanbul ignore next - should be impossible */ {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
// mode, but it's not a /m regex.
return new RegExp ( '$.' )
}
regExp . _glob = pattern
regExp . _src = re
return regExp
}
minimatch . makeRe = function ( pattern , options ) {
return new Minimatch ( pattern , options || { } ) . makeRe ( )
}
Minimatch . prototype . makeRe = makeRe
function makeRe ( ) {
if ( this . regexp || this . regexp === false ) return this . regexp
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
var set = this . set
if ( ! set . length ) {
this . regexp = false
return this . regexp
}
var options = this . options
var twoStar = options . noglobstar ? star
: options . dot ? twoStarDot
: twoStarNoDot
var flags = options . nocase ? 'i' : ''
var re = set . map ( function ( pattern ) {
return pattern . map ( function ( p ) {
return ( p === GLOBSTAR ) ? twoStar
: ( typeof p === 'string' ) ? regExpEscape ( p )
: p . _src
} ) . join ( '\\\/' )
} ) . join ( '|' )
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^(?:' + re + ')$'
// can match anything, as long as it's not this.
if ( this . negate ) re = '^(?!' + re + ').*$'
try {
this . regexp = new RegExp ( re , flags )
} catch ( ex ) /* istanbul ignore next - should be impossible */ {
this . regexp = false
}
return this . regexp
}
minimatch . match = function ( list , pattern , options ) {
options = options || { }
var mm = new Minimatch ( pattern , options )
list = list . filter ( function ( f ) {
return mm . match ( f )
} )
if ( mm . options . nonull && ! list . length ) {
list . push ( pattern )
}
return list
}
Minimatch . prototype . match = function match ( f , partial ) {
if ( typeof partial === 'undefined' ) partial = this . partial
this . debug ( 'match' , f , this . pattern )
// short-circuit in the case of busted things.
// comments, etc.
if ( this . comment ) return false
if ( this . empty ) return f === ''
if ( f === '/' && partial ) return true
var options = this . options
// windows: need to use /, not \
if ( path . sep !== '/' ) {
f = f . split ( path . sep ) . join ( '/' )
}
// treat the test path as a set of pathparts.
f = f . split ( slashSplit )
this . debug ( this . pattern , 'split' , f )
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
// match means that we have failed.
// Either way, return on the first hit.
var set = this . set
this . debug ( this . pattern , 'set' , set )
// Find the basename of the path by looking for the last non-empty segment
var filename
var i
for ( i = f . length - 1 ; i >= 0 ; i -- ) {
filename = f [ i ]
if ( filename ) break
}
for ( i = 0 ; i < set . length ; i ++ ) {
var pattern = set [ i ]
var file = f
if ( options . matchBase && pattern . length === 1 ) {
file = [ filename ]
}
var hit = this . matchOne ( file , pattern , partial )
if ( hit ) {
if ( options . flipNegate ) return true
return ! this . negate
}
}
// didn't get any hits. this is success if it's a negative
// pattern, failure otherwise.
if ( options . flipNegate ) return false
return this . negate
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
Minimatch . prototype . matchOne = function ( file , pattern , partial ) {
var options = this . options
this . debug ( 'matchOne' ,
{ 'this' : this , file : file , pattern : pattern } )
this . debug ( 'matchOne' , file . length , pattern . length )
for ( var fi = 0 ,
pi = 0 ,
fl = file . length ,
pl = pattern . length
; ( fi < fl ) && ( pi < pl )
; fi ++ , pi ++ ) {
this . debug ( 'matchOne loop' )
var p = pattern [ pi ]
var f = file [ fi ]
this . debug ( pattern , p , f )
// should be impossible.
// some invalid regexp stuff in the set.
/* istanbul ignore if */
if ( p === false ) return false
if ( p === GLOBSTAR ) {
this . debug ( 'GLOBSTAR' , [ pattern , p , f ] )
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi
var pr = pi + 1
if ( pr === pl ) {
this . debug ( '** at the end' )
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for ( ; fi < fl ; fi ++ ) {
if ( file [ fi ] === '.' || file [ fi ] === '..' ||
( ! options . dot && file [ fi ] . charAt ( 0 ) === '.' ) ) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
while ( fr < fl ) {
var swallowee = file [ fr ]
this . debug ( '\nglobstar while' , file , fr , pattern , pr , swallowee )
// XXX remove this slice. Just pass the start index.
if ( this . matchOne ( file . slice ( fr ) , pattern . slice ( pr ) , partial ) ) {
this . debug ( 'globstar found match!' , fr , fl , swallowee )
// found a match.
return true
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if ( swallowee === '.' || swallowee === '..' ||
( ! options . dot && swallowee . charAt ( 0 ) === '.' ) ) {
this . debug ( 'dot detected!' , file , fr , pattern , pr )
break
}
// ** swallows a segment, and continue.
this . debug ( 'globstar swallow a segment, and continue' )
fr ++
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
/* istanbul ignore if */
if ( partial ) {
// ran out of file
this . debug ( '\n>>> no match, partial?' , file , fr , pattern , pr )
if ( fr === fl ) return true
}
return false
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit
if ( typeof p === 'string' ) {
hit = f === p
this . debug ( 'string match' , p , f , hit )
} else {
hit = f . match ( p )
this . debug ( 'pattern match' , p , f , hit )
}
if ( ! hit ) return false
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match a
// a pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if ( fi === fl && pi === pl ) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true
} else if ( fi === fl ) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial
} else /* istanbul ignore else */ if ( pi === pl ) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
return ( fi === fl - 1 ) && ( file [ fi ] === '' )
}
// should be unreachable.
/* istanbul ignore next */
throw new Error ( 'wtf?' )
}
// replace stuff like \* with *
function globUnescape ( s ) {
return s . replace ( /\\(.)/g , '$1' )
}
function regExpEscape ( s ) {
return s . replace ( /[-[\]{}()*+?.,\\^$|#\s]/g , '\\$&' )
}
/***/ } ) ,
/***/ 4294 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
module . exports = _ _nccwpck _require _ _ ( 4219 ) ;
/***/ } ) ,
/***/ 4219 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
var net = _ _nccwpck _require _ _ ( 1808 ) ;
var tls = _ _nccwpck _require _ _ ( 4404 ) ;
var http = _ _nccwpck _require _ _ ( 3685 ) ;
var https = _ _nccwpck _require _ _ ( 5687 ) ;
var events = _ _nccwpck _require _ _ ( 2361 ) ;
var assert = _ _nccwpck _require _ _ ( 9491 ) ;
var util = _ _nccwpck _require _ _ ( 3837 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
function httpOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
return agent ;
}
function httpsOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function httpOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
return agent ;
}
function httpsOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
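// Agent that tunnels requests through a proxy using CONNECT; pools sockets up to
// maxSockets and queues further requests until a socket frees up.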
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect the request to connect same origin server,
// reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
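// Editor-added sketch (never invoked; values are hypothetical): addRequest
// either opens a new tunnel immediately or, once maxSockets concurrent tunnels
// exist, parks the request in self.requests until a socket is freed.
function exampleMaxSockets () {
  // an agent limited to 2 concurrent CONNECT tunnels through a hypothetical proxy
  return httpOverHttp({ maxSockets: 2, proxy: { host: 'proxy.example.com', port: 8080 } });
}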
TunnelingAgent . prototype . createSocket = function createSocket ( options , cb ) {
var self = this ;
var placeholder = { } ;
self . sockets . push ( placeholder ) ;
var connectOptions = mergeOptions ( { } , self . proxyOptions , {
method : 'CONNECT' ,
path : options . host + ':' + options . port ,
agent : false ,
headers : {
host : options . host + ':' + options . port
}
} ) ;
if ( options . localAddress ) {
connectOptions . localAddress = options . localAddress ;
}
if ( connectOptions . proxyAuth ) {
connectOptions . headers = connectOptions . headers || { } ;
connectOptions . headers [ 'Proxy-Authorization' ] = 'Basic ' +
Buffer . from ( connectOptions . proxyAuth ) . toString ( 'base64' ) ;
}
debug ( 'making CONNECT request' ) ;
var connectReq = self . request ( connectOptions ) ;
connectReq . useChunkedEncodingByDefault = false ; // for v0.6
connectReq . once ( 'response' , onResponse ) ; // for v0.6
connectReq . once ( 'upgrade' , onUpgrade ) ; // for v0.6
connectReq . once ( 'connect' , onConnect ) ; // for v0.7 or later
connectReq . once ( 'error' , onError ) ;
connectReq . end ( ) ;
function onResponse ( res ) {
// Very hacky. This is necessary to avoid http-parser leaks.
res . upgrade = true ;
}
function onUpgrade ( res , socket , head ) {
// Hacky.
process . nextTick ( function ( ) {
onConnect ( res , socket , head ) ;
} ) ;
}
function onConnect ( res , socket , head ) {
connectReq . removeAllListeners ( ) ;
socket . removeAllListeners ( ) ;
if ( res . statusCode !== 200 ) {
debug ( 'tunneling socket could not be established, statusCode=%d' ,
res . statusCode ) ;
socket . destroy ( ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'statusCode=' + res . statusCode ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
if ( head . length > 0 ) {
debug ( 'got illegal response body from proxy' ) ;
socket . destroy ( ) ;
var error = new Error ( 'got illegal response body from proxy' ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
debug ( 'tunneling connection has been established' ) ;
self . sockets [ self . sockets . indexOf ( placeholder ) ] = socket ;
return cb ( socket ) ;
}
function onError ( cause ) {
connectReq . removeAllListeners ( ) ;
debug ( 'tunneling socket could not be established, cause=%s\n' ,
cause . message , cause . stack ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'cause=' + cause . message ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
}
} ;
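// Editor-added note: what createSocket sends on the wire is roughly the
// following (illustrative only; host and credentials are hypothetical):
//
//   CONNECT example.com:443 HTTP/1.1
//   Host: example.com:443
//   Proxy-Authorization: Basic dXNlcjpwYXNz
//
// A 200 response hands the raw socket back via cb(socket); any other status
// code, or an unexpected response body from the proxy, is surfaced as an
// ECONNRESET error on the originating request.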
TunnelingAgent . prototype . removeSocket = function removeSocket ( socket ) {
var pos = this . sockets . indexOf ( socket )
if ( pos === - 1 ) {
return ;
}
this . sockets . splice ( pos , 1 ) ;
var pending = this . requests . shift ( ) ;
if ( pending ) {
// If there are pending requests and a socket closes, create a new
// socket to take its place in the pool.
this . createSocket ( pending , function ( socket ) {
pending . request . onSocket ( socket ) ;
} ) ;
}
} ;
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
function toOptions ( host , port , localAddress ) {
if ( typeof host === 'string' ) { // since v0.10
return {
host : host ,
port : port ,
localAddress : localAddress
} ;
}
return host ; // for v0.11 or later
}
function mergeOptions ( target ) {
for ( var i = 1 , len = arguments . length ; i < len ; ++ i ) {
var overrides = arguments [ i ] ;
if ( typeof overrides === 'object' ) {
var keys = Object . keys ( overrides ) ;
for ( var j = 0 , keyLen = keys . length ; j < keyLen ; ++ j ) {
var k = keys [ j ] ;
if ( overrides [ k ] !== undefined ) {
target [ k ] = overrides [ k ] ;
}
}
}
}
return target ;
}
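// Editor-added sketch (never invoked): later arguments to mergeOptions win,
// but undefined values are skipped, so explicit options are never clobbered
// by absent ones.
function exampleMergeOptions () {
  // -> { host: 'example.com', port: 8080 }  (3128 overridden, undefined host ignored)
  return mergeOptions({}, { host: 'example.com', port: 3128 }, { port: 8080, host: undefined });
}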
var debug ;
if ( process . env . NODE _DEBUG && /\btunnel\b/ . test ( process . env . NODE _DEBUG ) ) {
debug = function ( ) {
var args = Array . prototype . slice . call ( arguments ) ;
if ( typeof args [ 0 ] === 'string' ) {
args [ 0 ] = 'TUNNEL: ' + args [ 0 ] ;
} else {
args . unshift ( 'TUNNEL:' ) ;
}
console . error . apply ( console , args ) ;
}
} else {
debug = function ( ) { } ;
}
exports . debug = debug ; // for test
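// Editor-added note: the debug helper above is a no-op unless the process is
// started with NODE_DEBUG containing the word "tunnel", e.g.
//   NODE_DEBUG=tunnel node index.js
// in which case tunnel lifecycle messages are written to stderr with a
// "TUNNEL:" prefix.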
/***/ } ) ,
/***/ 5840 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
Object . defineProperty ( exports , "v1" , ( {
enumerable : true ,
get : function ( ) {
return _v . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v3" , ( {
enumerable : true ,
get : function ( ) {
return _v2 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v4" , ( {
enumerable : true ,
get : function ( ) {
return _v3 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v5" , ( {
enumerable : true ,
get : function ( ) {
return _v4 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "NIL" , ( {
enumerable : true ,
get : function ( ) {
return _nil . default ;
}
} ) ) ;
Object . defineProperty ( exports , "version" , ( {
enumerable : true ,
get : function ( ) {
return _version . default ;
}
} ) ) ;
Object . defineProperty ( exports , "validate" , ( {
enumerable : true ,
get : function ( ) {
return _validate . default ;
}
} ) ) ;
Object . defineProperty ( exports , "stringify" , ( {
enumerable : true ,
get : function ( ) {
return _stringify . default ;
}
} ) ) ;
Object . defineProperty ( exports , "parse" , ( {
enumerable : true ,
get : function ( ) {
return _parse . default ;
}
} ) ) ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8628 ) ) ;
var _v2 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6409 ) ) ;
var _v3 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5122 ) ) ;
var _v4 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 9120 ) ) ;
var _nil = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5332 ) ) ;
var _version = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1595 ) ) ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8950 ) ) ;
var _parse = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2746 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/***/ } ) ,
/***/ 4569 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function md5 ( bytes ) {
if ( Array . isArray ( bytes ) ) {
bytes = Buffer . from ( bytes ) ;
} else if ( typeof bytes === 'string' ) {
bytes = Buffer . from ( bytes , 'utf8' ) ;
}
return _crypto . default . createHash ( 'md5' ) . update ( bytes ) . digest ( ) ;
}
var _default = md5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 5332 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _default = '00000000-0000-0000-0000-000000000000' ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 2746 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function parse ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
let v ;
const arr = new Uint8Array ( 16 ) ; // Parse ########-....-....-....-............
arr [ 0 ] = ( v = parseInt ( uuid . slice ( 0 , 8 ) , 16 ) ) >>> 24 ;
arr [ 1 ] = v >>> 16 & 0xff ;
arr [ 2 ] = v >>> 8 & 0xff ;
arr [ 3 ] = v & 0xff ; // Parse ........-####-....-....-............
arr [ 4 ] = ( v = parseInt ( uuid . slice ( 9 , 13 ) , 16 ) ) >>> 8 ;
arr [ 5 ] = v & 0xff ; // Parse ........-....-####-....-............
arr [ 6 ] = ( v = parseInt ( uuid . slice ( 14 , 18 ) , 16 ) ) >>> 8 ;
arr [ 7 ] = v & 0xff ; // Parse ........-....-....-####-............
arr [ 8 ] = ( v = parseInt ( uuid . slice ( 19 , 23 ) , 16 ) ) >>> 8 ;
arr [ 9 ] = v & 0xff ; // Parse ........-....-....-....-############
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
arr [ 10 ] = ( v = parseInt ( uuid . slice ( 24 , 36 ) , 16 ) ) / 0x10000000000 & 0xff ;
arr [ 11 ] = v / 0x100000000 & 0xff ;
arr [ 12 ] = v >>> 24 & 0xff ;
arr [ 13 ] = v >>> 16 & 0xff ;
arr [ 14 ] = v >>> 8 & 0xff ;
arr [ 15 ] = v & 0xff ;
return arr ;
}
var _default = parse ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 814 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 807 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = rng ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const rnds8Pool = new Uint8Array ( 256 ) ; // # of random values to pre-allocate
let poolPtr = rnds8Pool . length ;
function rng ( ) {
if ( poolPtr > rnds8Pool . length - 16 ) {
_crypto . default . randomFillSync ( rnds8Pool ) ;
poolPtr = 0 ;
}
return rnds8Pool . slice ( poolPtr , poolPtr += 16 ) ;
}
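// Editor-added sketch (never invoked): rng hands out 16-byte slices from a
// 256-byte pool and only calls crypto.randomFillSync when the pool runs low,
// trading one syscall per 16 UUIDs instead of one per UUID.
function exampleRng () {
  var bytes = rng(); // Uint8Array of 16 cryptographically random bytes
  return bytes.length === 16;
}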
/***/ } ) ,
/***/ 5274 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function sha1 ( bytes ) {
if ( Array . isArray ( bytes ) ) {
bytes = Buffer . from ( bytes ) ;
} else if ( typeof bytes === 'string' ) {
bytes = Buffer . from ( bytes , 'utf8' ) ;
}
return _crypto . default . createHash ( 'sha1' ) . update ( bytes ) . digest ( ) ;
}
var _default = sha1 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 8950 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Convert array of 16 byte values to UUID string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 */
const byteToHex = [ ] ;
for ( let i = 0 ; i < 256 ; ++ i ) {
byteToHex . push ( ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ) ;
}
function stringify ( arr , offset = 0 ) {
// Note: Be careful editing this code! It's been tuned for performance
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
const uuid = ( byteToHex [ arr [ offset + 0 ] ] + byteToHex [ arr [ offset + 1 ] ] + byteToHex [ arr [ offset + 2 ] ] + byteToHex [ arr [ offset + 3 ] ] + '-' + byteToHex [ arr [ offset + 4 ] ] + byteToHex [ arr [ offset + 5 ] ] + '-' + byteToHex [ arr [ offset + 6 ] ] + byteToHex [ arr [ offset + 7 ] ] + '-' + byteToHex [ arr [ offset + 8 ] ] + byteToHex [ arr [ offset + 9 ] ] + '-' + byteToHex [ arr [ offset + 10 ] ] + byteToHex [ arr [ offset + 11 ] ] + byteToHex [ arr [ offset + 12 ] ] + byteToHex [ arr [ offset + 13 ] ] + byteToHex [ arr [ offset + 14 ] ] + byteToHex [ arr [ offset + 15 ] ] ) . toLowerCase ( ) ; // Consistency check for valid UUID. If this throws, it's likely due to one
// of the following:
// - One or more input array values don't map to a hex octet (leading to
// "undefined" in the uuid)
// - Invalid input values for the RFC `version` or `variant` fields
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Stringified UUID is invalid' ) ;
}
return uuid ;
}
var _default = stringify ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 8628 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 807 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8950 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
let _nodeId ;
let _clockseq ; // Previous uuid creation time
let _lastMSecs = 0 ;
let _lastNSecs = 0 ; // See https://github.com/uuidjs/uuid for API details
function v1 ( options , buf , offset ) {
let i = buf && offset || 0 ;
const b = buf || new Array ( 16 ) ;
options = options || { } ;
let node = options . node || _nodeId ;
let clockseq = options . clockseq !== undefined ? options . clockseq : _clockseq ; // node and clockseq need to be initialized to random values if they're not
// specified. We do this lazily to minimize issues related to insufficient
// system entropy. See #189
if ( node == null || clockseq == null ) {
const seedBytes = options . random || ( options . rng || _rng . default ) ( ) ;
if ( node == null ) {
// Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
node = _nodeId = [ seedBytes [ 0 ] | 0x01 , seedBytes [ 1 ] , seedBytes [ 2 ] , seedBytes [ 3 ] , seedBytes [ 4 ] , seedBytes [ 5 ] ] ;
}
if ( clockseq == null ) {
// Per 4.2.2, randomize (14 bit) clockseq
clockseq = _clockseq = ( seedBytes [ 6 ] << 8 | seedBytes [ 7 ] ) & 0x3fff ;
}
} // UUID timestamps are 100 nano-second units since the Gregorian epoch,
// (1582-10-15 00:00). JS numbers aren't precise enough for this, so
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
let msecs = options . msecs !== undefined ? options . msecs : Date . now ( ) ; // Per 4.2.1.2, use count of uuid's generated during the current clock
// cycle to simulate higher resolution clock
let nsecs = options . nsecs !== undefined ? options . nsecs : _lastNSecs + 1 ; // Time since last uuid creation (in msecs)
const dt = msecs - _lastMSecs + ( nsecs - _lastNSecs ) / 10000 ; // Per 4.2.1.2, Bump clockseq on clock regression
if ( dt < 0 && options . clockseq === undefined ) {
clockseq = clockseq + 1 & 0x3fff ;
} // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
// time interval
if ( ( dt < 0 || msecs > _lastMSecs ) && options . nsecs === undefined ) {
nsecs = 0 ;
} // Per 4.2.1.2 Throw error if too many uuids are requested
if ( nsecs >= 10000 ) {
throw new Error ( "uuid.v1(): Can't create more than 10M uuids/sec" ) ;
}
_lastMSecs = msecs ;
_lastNSecs = nsecs ;
_clockseq = clockseq ; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
msecs += 12219292800000 ; // `time_low`
const tl = ( ( msecs & 0xfffffff ) * 10000 + nsecs ) % 0x100000000 ;
b [ i ++ ] = tl >>> 24 & 0xff ;
b [ i ++ ] = tl >>> 16 & 0xff ;
b [ i ++ ] = tl >>> 8 & 0xff ;
b [ i ++ ] = tl & 0xff ; // `time_mid`
const tmh = msecs / 0x100000000 * 10000 & 0xfffffff ;
b [ i ++ ] = tmh >>> 8 & 0xff ;
b [ i ++ ] = tmh & 0xff ; // `time_high_and_version`
b [ i ++ ] = tmh >>> 24 & 0xf | 0x10 ; // include version
b [ i ++ ] = tmh >>> 16 & 0xff ; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
b [ i ++ ] = clockseq >>> 8 | 0x80 ; // `clock_seq_low`
b [ i ++ ] = clockseq & 0xff ; // `node`
for ( let n = 0 ; n < 6 ; ++ n ) {
b [ i + n ] = node [ n ] ;
}
return buf || ( 0 , _stringify . default ) ( b ) ;
}
var _default = v1 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6409 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5998 ) ) ;
var _md = _interopRequireDefault ( _ _nccwpck _require _ _ ( 4569 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v3 = ( 0 , _v . default ) ( 'v3' , 0x30 , _md . default ) ;
var _default = v3 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 5998 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = _default ;
exports . URL = exports . DNS = void 0 ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8950 ) ) ;
var _parse = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2746 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function stringToBytes ( str ) {
str = unescape ( encodeURIComponent ( str ) ) ; // UTF8 escape
const bytes = [ ] ;
for ( let i = 0 ; i < str . length ; ++ i ) {
bytes . push ( str . charCodeAt ( i ) ) ;
}
return bytes ;
}
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8' ;
exports . DNS = DNS ;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8' ;
exports . URL = URL ;
function _default ( name , version , hashfunc ) {
function generateUUID ( value , namespace , buf , offset ) {
if ( typeof value === 'string' ) {
value = stringToBytes ( value ) ;
}
if ( typeof namespace === 'string' ) {
namespace = ( 0 , _parse . default ) ( namespace ) ;
}
if ( namespace . length !== 16 ) {
throw TypeError ( 'Namespace must be array-like (16 iterable integer values, 0-255)' ) ;
} // Compute hash of namespace and value, Per 4.3
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
// hashfunc([...namespace, ... value])`
let bytes = new Uint8Array ( 16 + value . length ) ;
bytes . set ( namespace ) ;
bytes . set ( value , namespace . length ) ;
bytes = hashfunc ( bytes ) ;
bytes [ 6 ] = bytes [ 6 ] & 0x0f | version ;
bytes [ 8 ] = bytes [ 8 ] & 0x3f | 0x80 ;
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = bytes [ i ] ;
}
return buf ;
}
return ( 0 , _stringify . default ) ( bytes ) ;
} // Function#name is not settable on some platforms (#270)
try {
generateUUID . name = name ; // eslint-disable-next-line no-empty
} catch ( err ) { } // For CommonJS default export support
generateUUID . DNS = DNS ;
generateUUID . URL = URL ;
return generateUUID ;
}
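// Editor-added sketch (never invoked; the sha1 hash function is injected by
// the caller here): this factory is what the v3 and v5 modules call with their
// hash function; hashing a namespace plus a name always yields the same UUID
// for the same inputs.
function exampleNamespaceFactory (sha1) {
  var v5 = _default('v5', 0x50, sha1); // mirrors the real v5 wiring
  return v5('example.com', DNS);       // deterministic name-based UUID
}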
/***/ } ) ,
/***/ 5122 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 807 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8950 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function v4 ( options , buf , offset ) {
options = options || { } ;
const rnds = options . random || ( options . rng || _rng . default ) ( ) ; // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = rnds [ 6 ] & 0x0f | 0x40 ;
rnds [ 8 ] = rnds [ 8 ] & 0x3f | 0x80 ; // Copy bytes to buffer, if provided
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = rnds [ i ] ;
}
return buf ;
}
return ( 0 , _stringify . default ) ( rnds ) ;
}
var _default = v4 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 9120 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5998 ) ) ;
var _sha = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5274 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v5 = ( 0 , _v . default ) ( 'v5' , 0x50 , _sha . default ) ;
var _default = v5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6900 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _regex = _interopRequireDefault ( _ _nccwpck _require _ _ ( 814 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function validate ( uuid ) {
return typeof uuid === 'string' && _regex . default . test ( uuid ) ;
}
var _default = validate ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 1595 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function version ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
return parseInt ( uuid . substr ( 14 , 1 ) , 16 ) ;
}
var _default = version ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 9491 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "assert" ) ;
/***/ } ) ,
/***/ 2081 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "child_process" ) ;
/***/ } ) ,
/***/ 6113 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "crypto" ) ;
/***/ } ) ,
/***/ 2361 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 7147 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 3685 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 5687 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 1808 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 2037 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 1017 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 4521 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "readline" ) ;
/***/ } ) ,
/***/ 1576 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "string_decoder" ) ;
/***/ } ) ,
/***/ 9512 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "timers" ) ;
/***/ } ) ,
/***/ 4404 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "tls" ) ;
/***/ } ) ,
/***/ 3837 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "util" ) ;
/***/ } )
/******/ } ) ;
/************************************************************************/
/******/ // The module cache
/******/ var _ _webpack _module _cache _ _ = { } ;
/******/
/******/ // The require function
/******/ function _ _nccwpck _require _ _ ( moduleId ) {
/******/ // Check if module is in cache
/******/ var cachedModule = _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ if ( cachedModule !== undefined ) {
/******/ return cachedModule . exports ;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = _ _webpack _module _cache _ _ [ moduleId ] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports : { }
/******/ } ;
/******/
/******/ // Execute the module function
/******/ var threw = true ;
/******/ try {
/******/ _ _webpack _modules _ _ [ moduleId ] . call ( module . exports , module , module . exports , _ _nccwpck _require _ _ ) ;
/******/ threw = false ;
/******/ } finally {
/******/ if ( threw ) delete _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module . exports ;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
/******/
/******/ if ( typeof _ _nccwpck _require _ _ !== 'undefined' ) _ _nccwpck _require _ _ . ab = _ _dirname + "/" ;
/******/
/************************************************************************/
/******/
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
/******/ var _ _webpack _exports _ _ = _ _nccwpck _require _ _ ( 3109 ) ;
/******/ module . exports = _ _webpack _exports _ _ ;
/******/
/******/ } ) ( )
;
//# sourceMappingURL=index.js.map