2019-11-24 16:44:26 +09:00
module . exports =
/******/ ( function ( modules , runtime ) { // webpackBootstrap
/******/ "use strict" ;
/******/ // The module cache
/******/ var installedModules = { } ;
/******/
/******/ // The require function
/******/ function _ _webpack _require _ _ ( moduleId ) {
/******/
/******/ // Check if module is in cache
/******/ if ( installedModules [ moduleId ] ) {
/******/ return installedModules [ moduleId ] . exports ;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules [ moduleId ] = {
/******/ i : moduleId ,
/******/ l : false ,
/******/ exports : { }
/******/ } ;
/******/
/******/ // Execute the module function
2020-05-22 02:21:32 +00:00
/******/ var threw = true ;
/******/ try {
/******/ modules [ moduleId ] . call ( module . exports , module , module . exports , _ _webpack _require _ _ ) ;
/******/ threw = false ;
/******/ } finally {
/******/ if ( threw ) delete installedModules [ moduleId ] ;
/******/ }
2019-11-24 16:44:26 +09:00
/******/
/******/ // Flag the module as loaded
/******/ module . l = true ;
/******/
/******/ // Return the exports of the module
/******/ return module . exports ;
/******/ }
/******/
/******/
/******/ _ _webpack _require _ _ . ab = _ _dirname + "/" ;
/******/
/******/ // the startup function
/******/ function startup ( ) {
/******/ // Load entry module and return exports
2020-08-22 14:57:57 +09:00
/******/ return _ _webpack _require _ _ ( 932 ) ;
2019-11-24 16:44:26 +09:00
/******/ } ;
/******/
/******/ // run startup
/******/ return startup ( ) ;
/******/ } )
/************************************************************************/
/******/ ( {
2020-08-22 14:57:57 +09:00
/***/ 1 :
2019-11-24 16:44:26 +09:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-22 14:57:57 +09:00
module . exports = isexe
isexe . sync = sync
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
var fs = _ _webpack _require _ _ ( 747 )
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
function checkPathExt ( path , options ) {
var pathext = options . pathExt !== undefined ?
options . pathExt : process . env . PATHEXT
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
if ( ! pathext ) {
return true
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
pathext = pathext . split ( ';' )
if ( pathext . indexOf ( '' ) !== - 1 ) {
return true
}
for ( var i = 0 ; i < pathext . length ; i ++ ) {
var p = pathext [ i ] . toLowerCase ( )
if ( p && path . substr ( - p . length ) . toLowerCase ( ) === p ) {
return true
}
}
return false
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
function checkStat ( stat , path , options ) {
if ( ! stat . isSymbolicLink ( ) && ! stat . isFile ( ) ) {
return false
}
return checkPathExt ( path , options )
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
function isexe ( path , options , cb ) {
fs . stat ( path , function ( er , stat ) {
cb ( er , er ? false : checkStat ( stat , path , options ) )
} )
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
function sync ( path , options ) {
return checkStat ( fs . statSync ( path ) , path , options )
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
/***/ } ) ,
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
/***/ 16 :
/***/ ( function ( module ) {
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
module . exports = require ( "tls" ) ;
2019-11-24 16:44:26 +09:00
/***/ } ) ,
2020-08-22 14:57:57 +09:00
/***/ 30 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
"use strict" ;
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
2019-11-24 16:44:26 +09:00
} ;
2020-08-22 14:57:57 +09:00
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . getOctokitOptions = exports . GitHub = exports . context = void 0 ;
const Context = _ _importStar ( _ _webpack _require _ _ ( 53 ) ) ;
const Utils = _ _importStar ( _ _webpack _require _ _ ( 914 ) ) ;
// octokit + plugins
const core _1 = _ _webpack _require _ _ ( 762 ) ;
const plugin _rest _endpoint _methods _1 = _ _webpack _require _ _ ( 44 ) ;
const plugin _paginate _rest _1 = _ _webpack _require _ _ ( 193 ) ;
exports . context = new Context . Context ( ) ;
const baseUrl = Utils . getApiBaseUrl ( ) ;
const defaults = {
baseUrl ,
request : {
agent : Utils . getProxyAgent ( baseUrl )
}
2019-11-24 16:44:26 +09:00
} ;
2020-08-22 14:57:57 +09:00
exports . GitHub = core _1 . Octokit . plugin ( plugin _rest _endpoint _methods _1 . restEndpointMethods , plugin _paginate _rest _1 . paginateRest ) . defaults ( defaults ) ;
/ * *
* Convience function to correctly format Octokit Options to pass into the constructor .
*
* @ param token the repo PAT or GITHUB _TOKEN
* @ param options other options to set
* /
function getOctokitOptions ( token , options ) {
const opts = Object . assign ( { } , options || { } ) ; // Shallow clone - don't mutate the object provided by the caller
// Auth
const auth = Utils . getAuthString ( token , opts ) ;
if ( auth ) {
opts . auth = auth ;
}
return opts ;
}
exports . getOctokitOptions = getOctokitOptions ;
//# sourceMappingURL=utils.js.map
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
/***/ } ) ,
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
/***/ 32 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
"use strict" ;
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
var shebangRegex = _ _webpack _require _ _ ( 638 ) ;
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
module . exports = function ( str ) {
var match = str . match ( shebangRegex ) ;
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
if ( ! match ) {
return null ;
2019-11-24 16:44:26 +09:00
}
2020-08-22 14:57:57 +09:00
var arr = match [ 0 ] . replace ( /#! ?/ , '' ) . split ( ' ' ) ;
var bin = arr [ 0 ] . split ( '/' ) . pop ( ) ;
var arg = arr [ 1 ] ;
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
return ( bin === 'env' ?
arg :
bin + ( arg ? ' ' + arg : '' )
) ;
2019-11-24 16:44:26 +09:00
} ;
/***/ } ) ,
2020-08-22 14:57:57 +09:00
/***/ 44 :
/***/ ( function ( _ _unusedmodule , exports ) {
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
"use strict" ;
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
2020-05-01 11:47:52 +00:00
2020-08-22 14:57:57 +09:00
const Endpoints = {
actions : {
addSelectedRepoToOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ] ,
cancelWorkflowRun : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" ] ,
createOrUpdateOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}" ] ,
createOrUpdateRepoSecret : [ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" , { } , {
renamedParameters : {
name : "secret_name"
2019-11-24 16:44:26 +09:00
}
2020-08-22 14:57:57 +09:00
} ] ,
createOrUpdateSecretForRepo : [ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" , { } , {
renamed : [ "actions" , "createOrUpdateRepoSecret" ] ,
renamedParameters : {
name : "secret_name"
2019-11-24 16:44:26 +09:00
}
2020-08-22 14:57:57 +09:00
} ] ,
createRegistrationToken : [ "POST /repos/{owner}/{repo}/actions/runners/registration-token" , { } , {
renamed : [ "actions" , "createRegistrationTokenForRepo" ]
} ] ,
createRegistrationTokenForOrg : [ "POST /orgs/{org}/actions/runners/registration-token" ] ,
createRegistrationTokenForRepo : [ "POST /repos/{owner}/{repo}/actions/runners/registration-token" ] ,
createRemoveToken : [ "POST /repos/{owner}/{repo}/actions/runners/remove-token" , { } , {
renamed : [ "actions" , "createRemoveTokenForRepo" ]
} ] ,
createRemoveTokenForOrg : [ "POST /orgs/{org}/actions/runners/remove-token" ] ,
createRemoveTokenForRepo : [ "POST /repos/{owner}/{repo}/actions/runners/remove-token" ] ,
deleteArtifact : [ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
deleteOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}" ] ,
deleteRepoSecret : [ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" , { } , {
renamedParameters : {
name : "secret_name"
2019-11-24 16:44:26 +09:00
}
2020-08-22 14:57:57 +09:00
} ] ,
deleteSecretFromRepo : [ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" , { } , {
renamed : [ "actions" , "deleteRepoSecret" ] ,
renamedParameters : {
name : "secret_name"
2020-03-28 08:56:01 +09:00
}
2020-08-22 14:57:57 +09:00
} ] ,
deleteSelfHostedRunnerFromOrg : [ "DELETE /orgs/{org}/actions/runners/{runner_id}" ] ,
deleteSelfHostedRunnerFromRepo : [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" ] ,
deleteWorkflowRunLogs : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ] ,
downloadArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" ] ,
downloadWorkflowJobLogs : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" ] ,
downloadWorkflowRunLogs : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ] ,
getArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/actions/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}" ] ,
getPublicKey : [ "GET /repos/{owner}/{repo}/actions/secrets/public-key" , { } , {
renamed : [ "actions" , "getRepoPublicKey" ]
} ] ,
getRepoPublicKey : [ "GET /repos/{owner}/{repo}/actions/secrets/public-key" ] ,
getRepoSecret : [ "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}" , { } , {
renamedParameters : {
name : "secret_name"
2020-03-28 08:56:01 +09:00
}
2020-08-22 14:57:57 +09:00
} ] ,
getSecret : [ "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}" , { } , {
renamed : [ "actions" , "getRepoSecret" ] ,
renamedParameters : {
name : "secret_name"
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} ] ,
getSelfHostedRunner : [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" , { } , {
renamed : [ "actions" , "getSelfHostedRunnerForRepo" ]
} ] ,
getSelfHostedRunnerForOrg : [ "GET /orgs/{org}/actions/runners/{runner_id}" ] ,
getSelfHostedRunnerForRepo : [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" ] ,
getWorkflow : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}" ] ,
getWorkflowJob : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}" ] ,
getWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
getWorkflowRunUsage : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" ] ,
getWorkflowUsage : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" ] ,
listArtifactsForRepo : [ "GET /repos/{owner}/{repo}/actions/artifacts" ] ,
listDownloadsForSelfHostedRunnerApplication : [ "GET /repos/{owner}/{repo}/actions/runners/downloads" , { } , {
renamed : [ "actions" , "listRunnerApplicationsForRepo" ]
} ] ,
listJobsForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" ] ,
listOrgSecrets : [ "GET /orgs/{org}/actions/secrets" ] ,
listRepoSecrets : [ "GET /repos/{owner}/{repo}/actions/secrets" ] ,
listRepoWorkflowRuns : [ "GET /repos/{owner}/{repo}/actions/runs" ] ,
listRepoWorkflows : [ "GET /repos/{owner}/{repo}/actions/workflows" ] ,
listRunnerApplicationsForOrg : [ "GET /orgs/{org}/actions/runners/downloads" ] ,
listRunnerApplicationsForRepo : [ "GET /repos/{owner}/{repo}/actions/runners/downloads" ] ,
listSecretsForRepo : [ "GET /repos/{owner}/{repo}/actions/secrets" , { } , {
renamed : [ "actions" , "listRepoSecrets" ]
} ] ,
listSelectedReposForOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" ] ,
listSelfHostedRunnersForOrg : [ "GET /orgs/{org}/actions/runners" ] ,
listSelfHostedRunnersForRepo : [ "GET /repos/{owner}/{repo}/actions/runners" ] ,
listWorkflowJobLogs : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" , { } , {
renamed : [ "actions" , "downloadWorkflowJobLogs" ]
} ] ,
listWorkflowRunArtifacts : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" ] ,
listWorkflowRunLogs : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" , { } , {
renamed : [ "actions" , "downloadWorkflowRunLogs" ]
} ] ,
listWorkflowRuns : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" ] ,
reRunWorkflow : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun" ] ,
removeSelectedRepoFromOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ] ,
removeSelfHostedRunner : [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" , { } , {
renamed : [ "actions" , "deleteSelfHostedRunnerFromRepo" ]
} ] ,
setSelectedReposForOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" ]
} ,
activity : {
checkRepoIsStarredByAuthenticatedUser : [ "GET /user/starred/{owner}/{repo}" ] ,
checkStarringRepo : [ "GET /user/starred/{owner}/{repo}" , { } , {
renamed : [ "activity" , "checkRepoIsStarredByAuthenticatedUser" ]
} ] ,
deleteRepoSubscription : [ "DELETE /repos/{owner}/{repo}/subscription" ] ,
deleteThreadSubscription : [ "DELETE /notifications/threads/{thread_id}/subscription" ] ,
getFeeds : [ "GET /feeds" ] ,
getRepoSubscription : [ "GET /repos/{owner}/{repo}/subscription" ] ,
getThread : [ "GET /notifications/threads/{thread_id}" ] ,
getThreadSubscription : [ "PUT /notifications" , { } , {
renamed : [ "activity" , "getThreadSubscriptionForAuthenticatedUser" ]
} ] ,
getThreadSubscriptionForAuthenticatedUser : [ "GET /notifications/threads/{thread_id}/subscription" ] ,
listEventsForAuthenticatedUser : [ "GET /users/{username}/events" ] ,
listEventsForOrg : [ "GET /users/{username}/events/orgs/{org}" , { } , {
renamed : [ "activity" , "listOrgEventsForAuthenticatedUser" ]
} ] ,
listEventsForUser : [ "GET /users/{username}/events" , { } , {
renamed : [ "activity" , "listEventsForAuthenticatedUser" ]
} ] ,
listFeeds : [ "GET /feeds" , { } , {
renamed : [ "activity" , "getFeeds" ]
} ] ,
listNotifications : [ "GET /notifications" , { } , {
renamed : [ "activity" , "listNotificationsForAuthenticatedUser" ]
} ] ,
listNotificationsForAuthenticatedUser : [ "GET /notifications" ] ,
listNotificationsForRepo : [ "GET /repos/{owner}/{repo}/notifications" , { } , {
renamed : [ "activity" , "listRepoNotificationsForAuthenticatedUser" ]
} ] ,
listOrgEventsForAuthenticatedUser : [ "GET /users/{username}/events/orgs/{org}" ] ,
listPublicEvents : [ "GET /events" ] ,
listPublicEventsForOrg : [ "GET /orgs/{org}/events" , { } , {
renamed : [ "activity" , "listPublicOrgEvents" ]
} ] ,
listPublicEventsForRepoNetwork : [ "GET /networks/{owner}/{repo}/events" ] ,
listPublicEventsForUser : [ "GET /users/{username}/events/public" ] ,
listPublicOrgEvents : [ "GET /orgs/{org}/events" ] ,
listReceivedEventsForUser : [ "GET /users/{username}/received_events" ] ,
listReceivedPublicEventsForUser : [ "GET /users/{username}/received_events/public" ] ,
listRepoEvents : [ "GET /repos/{owner}/{repo}/events" ] ,
listRepoNotificationsForAuthenticatedUser : [ "GET /repos/{owner}/{repo}/notifications" ] ,
listReposStarredByAuthenticatedUser : [ "GET /user/starred" ] ,
listReposStarredByUser : [ "GET /users/{username}/starred" ] ,
listReposWatchedByUser : [ "GET /users/{username}/subscriptions" ] ,
listStargazersForRepo : [ "GET /repos/{owner}/{repo}/stargazers" ] ,
listWatchedReposForAuthenticatedUser : [ "GET /user/subscriptions" ] ,
listWatchersForRepo : [ "GET /repos/{owner}/{repo}/subscribers" ] ,
markAsRead : [ "PUT /notifications" , { } , {
renamed : [ "activity" , "markNotificationsAsRead" ]
} ] ,
markNotificationsAsRead : [ "PUT /notifications" ] ,
markNotificationsAsReadForRepo : [ "PUT /repos/{owner}/{repo}/notifications" , { } , {
renamed : [ "activity" , "markRepoNotificationsAsRead" ]
} ] ,
markRepoNotificationsAsRead : [ "PUT /repos/{owner}/{repo}/notifications" ] ,
markThreadAsRead : [ "PATCH /notifications/threads/{thread_id}" ] ,
setRepoSubscription : [ "PUT /repos/{owner}/{repo}/subscription" ] ,
setThreadSubscription : [ "PUT /notifications/threads/{thread_id}/subscription" ] ,
starRepo : [ "PUT /user/starred/{owner}/{repo}" , { } , {
renamed : [ "activity" , "starRepoForAuthenticatedUser" ]
} ] ,
starRepoForAuthenticatedUser : [ "PUT /user/starred/{owner}/{repo}" ] ,
unstarRepo : [ "DELETE /user/starred/{owner}/{repo}" , { } , {
renamed : [ "activity" , "unstarRepoForAuthenticatedUser" ]
} ] ,
unstarRepoForAuthenticatedUser : [ "DELETE /user/starred/{owner}/{repo}" ]
} ,
apps : {
addRepoToInstallation : [ "PUT /user/installations/{installation_id}/repositories/{repository_id}" , {
mediaType : {
previews : [ "machine-man" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
checkAccountIsAssociatedWithAny : [ "GET /marketplace_listing/accounts/{account_id}" , { } , {
renamed : [ "apps" , "getSubscriptionPlanForAccount" ]
} ] ,
checkAccountIsAssociatedWithAnyStubbed : [ "GET /marketplace_listing/stubbed/accounts/{account_id}" , { } , {
renamed : [ "apps" , "getSubscriptionPlanForAccountStubbed" ]
} ] ,
checkToken : [ "POST /applications/{client_id}/token" ] ,
createContentAttachment : [ "POST /content_references/{content_reference_id}/attachments" , {
mediaType : {
previews : [ "corsair" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
createFromManifest : [ "POST /app-manifests/{code}/conversions" ] ,
createInstallationToken : [ "POST /app/installations/{installation_id}/access_tokens" , {
mediaType : {
previews : [ "machine-man" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
deleteAuthorization : [ "DELETE /applications/{client_id}/grant" ] ,
deleteInstallation : [ "DELETE /app/installations/{installation_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
deleteToken : [ "DELETE /applications/{client_id}/token" ] ,
getAuthenticated : [ "GET /app" , {
mediaType : {
previews : [ "machine-man" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
getBySlug : [ "GET /apps/{app_slug}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getInstallation : [ "GET /app/installations/{installation_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getOrgInstallation : [ "GET /orgs/{org}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getRepoInstallation : [ "GET /repos/{owner}/{repo}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getSubscriptionPlanForAccount : [ "GET /marketplace_listing/accounts/{account_id}" ] ,
getSubscriptionPlanForAccountStubbed : [ "GET /marketplace_listing/stubbed/accounts/{account_id}" ] ,
getUserInstallation : [ "GET /users/{username}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listAccountsForPlan : [ "GET /marketplace_listing/plans/{plan_id}/accounts" ] ,
listAccountsForPlanStubbed : [ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" ] ,
listAccountsUserOrOrgOnPlan : [ "GET /marketplace_listing/plans/{plan_id}/accounts" , { } , {
renamed : [ "apps" , "listAccountsForPlan" ]
} ] ,
listAccountsUserOrOrgOnPlanStubbed : [ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" , { } , {
renamed : [ "apps" , "listAccountsForPlanStubbed" ]
} ] ,
listInstallationReposForAuthenticatedUser : [ "GET /user/installations/{installation_id}/repositories" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listInstallations : [ "GET /app/installations" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listInstallationsForAuthenticatedUser : [ "GET /user/installations" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listMarketplacePurchasesForAuthenticatedUser : [ "GET /user/marketplace_purchases" , { } , {
renamed : [ "apps" , "listSubscriptionsForAuthenticatedUser" ]
} ] ,
listMarketplacePurchasesForAuthenticatedUserStubbed : [ "GET /user/marketplace_purchases/stubbed" , { } , {
renamed : [ "apps" , "listSubscriptionsForAuthenticatedUserStubbed" ]
} ] ,
listPlans : [ "GET /marketplace_listing/plans" ] ,
listPlansStubbed : [ "GET /marketplace_listing/stubbed/plans" ] ,
listRepos : [ "GET /installation/repositories" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listSubscriptionsForAuthenticatedUser : [ "GET /user/marketplace_purchases" ] ,
listSubscriptionsForAuthenticatedUserStubbed : [ "GET /user/marketplace_purchases/stubbed" ] ,
removeRepoFromInstallation : [ "DELETE /user/installations/{installation_id}/repositories/{repository_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
resetToken : [ "PATCH /applications/{client_id}/token" ] ,
revokeInstallationToken : [ "DELETE /installation/token" ] ,
suspendInstallation : [ "PUT /app/installations/{installation_id}/suspended" ] ,
unsuspendInstallation : [ "DELETE /app/installations/{installation_id}/suspended" ]
} ,
checks : {
create : [ "POST /repos/{owner}/{repo}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
createSuite : [ "POST /repos/{owner}/{repo}/check-suites" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
get : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
getSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}" , {
mediaType : {
previews : [ "antiope" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listAnnotations : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" , {
mediaType : {
previews : [ "antiope" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
listForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listSuitesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites" , {
mediaType : {
previews : [ "antiope" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
rerequestSuite : [ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" , {
mediaType : {
previews : [ "antiope" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
setSuitesPreferences : [ "PATCH /repos/{owner}/{repo}/check-suites/preferences" , {
mediaType : {
previews : [ "antiope" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
update : [ "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}" , {
mediaType : {
previews : [ "antiope" ]
}
} ]
} ,
codeScanning : {
getAlert : [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/code-scanning/alerts" ]
} ,
codesOfConduct : {
getAllCodesOfConduct : [ "GET /codes_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
getConductCode : [ "GET /codes_of_conduct/{key}" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
getForRepo : [ "GET /repos/{owner}/{repo}/community/code_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
listConductCodes : [ "GET /codes_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} , {
renamed : [ "codesOfConduct" , "getAllCodesOfConduct" ]
} ]
2020-06-06 12:12:17 +09:00
} ,
2020-08-22 14:57:57 +09:00
emojis : {
get : [ "GET /emojis" ]
} ,
gists : {
checkIsStarred : [ "GET /gists/{gist_id}/star" ] ,
create : [ "POST /gists" ] ,
createComment : [ "POST /gists/{gist_id}/comments" ] ,
delete : [ "DELETE /gists/{gist_id}" ] ,
deleteComment : [ "DELETE /gists/{gist_id}/comments/{comment_id}" ] ,
fork : [ "POST /gists/{gist_id}/forks" ] ,
get : [ "GET /gists/{gist_id}" ] ,
getComment : [ "GET /gists/{gist_id}/comments/{comment_id}" ] ,
getRevision : [ "GET /gists/{gist_id}/{sha}" ] ,
list : [ "GET /gists" ] ,
listComments : [ "GET /gists/{gist_id}/comments" ] ,
listCommits : [ "GET /gists/{gist_id}/commits" ] ,
listForUser : [ "GET /users/{username}/gists" ] ,
listForks : [ "GET /gists/{gist_id}/forks" ] ,
listPublic : [ "GET /gists/public" ] ,
listPublicForUser : [ "GET /users/{username}/gists" , { } , {
renamed : [ "gists" , "listForUser" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
listStarred : [ "GET /gists/starred" ] ,
star : [ "PUT /gists/{gist_id}/star" ] ,
unstar : [ "DELETE /gists/{gist_id}/star" ] ,
update : [ "PATCH /gists/{gist_id}" ] ,
updateComment : [ "PATCH /gists/{gist_id}/comments/{comment_id}" ]
} ,
git : {
createBlob : [ "POST /repos/{owner}/{repo}/git/blobs" ] ,
createCommit : [ "POST /repos/{owner}/{repo}/git/commits" ] ,
createRef : [ "POST /repos/{owner}/{repo}/git/refs" ] ,
createTag : [ "POST /repos/{owner}/{repo}/git/tags" ] ,
createTree : [ "POST /repos/{owner}/{repo}/git/trees" ] ,
deleteRef : [ "DELETE /repos/{owner}/{repo}/git/refs/{ref}" ] ,
getBlob : [ "GET /repos/{owner}/{repo}/git/blobs/{file_sha}" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/git/commits/{commit_sha}" ] ,
getRef : [ "GET /repos/{owner}/{repo}/git/ref/{ref}" ] ,
getTag : [ "GET /repos/{owner}/{repo}/git/tags/{tag_sha}" ] ,
getTree : [ "GET /repos/{owner}/{repo}/git/trees/{tree_sha}" ] ,
listMatchingRefs : [ "GET /repos/{owner}/{repo}/git/matching-refs/{ref}" ] ,
updateRef : [ "PATCH /repos/{owner}/{repo}/git/refs/{ref}" ]
} ,
gitignore : {
getTemplate : [ "GET /gitignore/templates/{name}" ] ,
listTemplates : [ "GET /gitignore/templates" ]
} ,
interactions : {
addOrUpdateRestrictionsForOrg : [ "PUT /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
addOrUpdateRestrictionsForRepo : [ "PUT /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
getRestrictionsForOrg : [ "GET /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
getRestrictionsForRepo : [ "GET /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
removeRestrictionsForOrg : [ "DELETE /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
removeRestrictionsForRepo : [ "DELETE /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ]
} ,
issues : {
addAssignees : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" ] ,
addLabels : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
checkAssignee : [ "GET /repos/{owner}/{repo}/assignees/{assignee}" , { } , {
renamed : [ "issues" , "checkUserCanBeAssigned" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
checkUserCanBeAssigned : [ "GET /repos/{owner}/{repo}/assignees/{assignee}" ] ,
create : [ "POST /repos/{owner}/{repo}/issues" ] ,
createComment : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
createLabel : [ "POST /repos/{owner}/{repo}/labels" ] ,
createMilestone : [ "POST /repos/{owner}/{repo}/milestones" ] ,
deleteComment : [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
deleteLabel : [ "DELETE /repos/{owner}/{repo}/labels/{name}" ] ,
deleteMilestone : [ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
get : [ "GET /repos/{owner}/{repo}/issues/{issue_number}" ] ,
getComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
getEvent : [ "GET /repos/{owner}/{repo}/issues/events/{event_id}" ] ,
getLabel : [ "GET /repos/{owner}/{repo}/labels/{name}" ] ,
getMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
list : [ "GET /issues" ] ,
listAssignees : [ "GET /repos/{owner}/{repo}/assignees" ] ,
listComments : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
listCommentsForRepo : [ "GET /repos/{owner}/{repo}/issues/comments" ] ,
listEvents : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/events" ] ,
listEventsForRepo : [ "GET /repos/{owner}/{repo}/issues/events" ] ,
listEventsForTimeline : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" , {
mediaType : {
previews : [ "mockingbird" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
listForAuthenticatedUser : [ "GET /user/issues" ] ,
listForOrg : [ "GET /orgs/{org}/issues" ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/issues" ] ,
listLabelsForMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" ] ,
listLabelsForRepo : [ "GET /repos/{owner}/{repo}/labels" ] ,
listLabelsOnIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
listMilestones : [ "GET /repos/{owner}/{repo}/milestones" ] ,
listMilestonesForRepo : [ "GET /repos/{owner}/{repo}/milestones" , { } , {
renamed : [ "issues" , "listMilestones" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
lock : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
removeAllLabels : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
removeAssignees : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" ] ,
removeLabel : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" ] ,
removeLabels : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" , { } , {
renamed : [ "issues" , "removeAllLabels" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
replaceLabels : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels" , { } , {
renamed : [ "issues" , "replaceAllLabels" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
setLabels : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
unlock : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
update : [ "PATCH /repos/{owner}/{repo}/issues/{issue_number}" ] ,
updateComment : [ "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
updateLabel : [ "PATCH /repos/{owner}/{repo}/labels/{name}" ] ,
updateMilestone : [ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" ]
2020-06-06 12:12:17 +09:00
} ,
2020-08-22 14:57:57 +09:00
licenses : {
get : [ "GET /licenses/{license}" ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/license" ] ,
listCommonlyUsed : [ "GET /licenses" ]
} ,
markdown : {
render : [ "POST /markdown" ] ,
renderRaw : [ "POST /markdown/raw" , {
headers : {
"content-type" : "text/plain; charset=utf-8"
}
} ]
} ,
meta : {
get : [ "GET /meta" ]
} ,
migrations : {
cancelImport : [ "DELETE /repos/{owner}/{repo}/import" ] ,
deleteArchiveForAuthenticatedUser : [ "DELETE /user/migrations/{migration_id}/archive" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
deleteArchiveForOrg : [ "DELETE /orgs/{org}/migrations/{migration_id}/archive" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
downloadArchiveForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/archive" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
getArchiveForAuthenticatedUser : [ "GET /user/migrations/{migration_id}/archive" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
getCommitAuthors : [ "GET /repos/{owner}/{repo}/import/authors" ] ,
getImportProgress : [ "GET /repos/{owner}/{repo}/import" , { } , {
renamed : [ "migrations" , "getImportStatus" ]
} ] ,
getImportStatus : [ "GET /repos/{owner}/{repo}/import" ] ,
getLargeFiles : [ "GET /repos/{owner}/{repo}/import/large_files" ] ,
getStatusForAuthenticatedUser : [ "GET /user/migrations/{migration_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
getStatusForOrg : [ "GET /orgs/{org}/migrations/{migration_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForAuthenticatedUser : [ "GET /user/migrations" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForOrg : [ "GET /orgs/{org}/migrations" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listReposForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/repositories" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listReposForUser : [ "GET /user/{migration_id}/repositories" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
mapCommitAuthor : [ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}" ] ,
setLfsPreference : [ "PATCH /repos/{owner}/{repo}/import/lfs" ] ,
startForAuthenticatedUser : [ "POST /user/migrations" ] ,
startForOrg : [ "POST /orgs/{org}/migrations" ] ,
startImport : [ "PUT /repos/{owner}/{repo}/import" ] ,
unlockRepoForAuthenticatedUser : [ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
unlockRepoForOrg : [ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "wyandotte" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
updateImport : [ "PATCH /repos/{owner}/{repo}/import" ]
} ,
orgs : {
addOrUpdateMembership : [ "PUT /orgs/{org}/memberships/{username}" ] ,
blockUser : [ "PUT /orgs/{org}/blocks/{username}" ] ,
checkBlockedUser : [ "GET /orgs/{org}/blocks/{username}" ] ,
checkMembership : [ "GET /orgs/{org}/members/{username}" ] ,
checkPublicMembership : [ "GET /orgs/{org}/public_members/{username}" ] ,
concealMembership : [ "DELETE /orgs/{org}/public_members/{username}" ] ,
convertMemberToOutsideCollaborator : [ "PUT /orgs/{org}/outside_collaborators/{username}" ] ,
createHook : [ "POST /orgs/{org}/hooks" ] ,
createInvitation : [ "POST /orgs/{org}/invitations" ] ,
deleteHook : [ "DELETE /orgs/{org}/hooks/{hook_id}" ] ,
get : [ "GET /orgs/{org}" ] ,
getHook : [ "GET /orgs/{org}/hooks/{hook_id}" ] ,
getMembership : [ "GET /orgs/{org}/memberships/{username}" ] ,
getMembershipForAuthenticatedUser : [ "GET /user/memberships/orgs/{org}" ] ,
list : [ "GET /organizations" ] ,
listBlockedUsers : [ "GET /orgs/{org}/blocks" ] ,
listForAuthenticatedUser : [ "GET /user/orgs" ] ,
listForUser : [ "GET /users/{username}/orgs" ] ,
listHooks : [ "GET /orgs/{org}/hooks" ] ,
listInstallations : [ "GET /orgs/{org}/installations" , {
2020-06-06 12:12:17 +09:00
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
2020-08-22 14:57:57 +09:00
listInvitationTeams : [ "GET /orgs/{org}/invitations/{invitation_id}/teams" ] ,
listMembers : [ "GET /orgs/{org}/members" ] ,
listMemberships : [ "GET /user/memberships/orgs" ] ,
listOutsideCollaborators : [ "GET /orgs/{org}/outside_collaborators" ] ,
listPendingInvitations : [ "GET /orgs/{org}/invitations" ] ,
listPublicMembers : [ "GET /orgs/{org}/public_members" ] ,
pingHook : [ "POST /orgs/{org}/hooks/{hook_id}/pings" ] ,
publicizeMembership : [ "PUT /orgs/{org}/public_members/{username}" ] ,
removeMember : [ "DELETE /orgs/{org}/members/{username}" ] ,
removeMembership : [ "DELETE /orgs/{org}/memberships/{username}" ] ,
removeOutsideCollaborator : [ "DELETE /orgs/{org}/outside_collaborators/{username}" ] ,
unblockUser : [ "DELETE /orgs/{org}/blocks/{username}" ] ,
update : [ "PATCH /orgs/{org}" ] ,
updateHook : [ "PATCH /orgs/{org}/hooks/{hook_id}" ] ,
updateMembership : [ "PATCH /user/memberships/orgs/{org}" ]
} ,
projects : {
addCollaborator : [ "PUT /projects/{project_id}/collaborators/{username}" , {
mediaType : {
previews : [ "inertia" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
createCard : [ "POST /projects/columns/{column_id}/cards" , {
mediaType : {
previews : [ "inertia" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
createColumn : [ "POST /projects/{project_id}/columns" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
createForAuthenticatedUser : [ "POST /user/projects" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
createForOrg : [ "POST /orgs/{org}/projects" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
createForRepo : [ "POST /repos/{owner}/{repo}/projects" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
delete : [ "DELETE /projects/{project_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
deleteCard : [ "DELETE /projects/columns/cards/{card_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
deleteColumn : [ "DELETE /projects/columns/{column_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
get : [ "GET /projects/{project_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
getCard : [ "GET /projects/columns/cards/{card_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
getColumn : [ "GET /projects/columns/{column_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listCards : [ "GET /projects/columns/{column_id}/cards" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listCollaborators : [ "GET /projects/{project_id}/collaborators" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listColumns : [ "GET /projects/{project_id}/columns" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} ] ,
listForOrg : [ "GET /orgs/{org}/projects" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForRepo : [ "GET /repos/{owner}/{repo}/projects" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForUser : [ "GET /users/{username}/projects" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
moveCard : [ "POST /projects/columns/cards/{card_id}/moves" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
moveColumn : [ "POST /projects/columns/{column_id}/moves" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
removeCollaborator : [ "DELETE /projects/{project_id}/collaborators/{username}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
reviewUserPermissionLevel : [ "GET /projects/{project_id}/collaborators/{username}/permission" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
update : [ "PATCH /projects/{project_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
updateCard : [ "PATCH /projects/columns/cards/{card_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
updateColumn : [ "PATCH /projects/columns/{column_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ]
} ,
2020-08-22 14:57:57 +09:00
pulls : {
checkIfMerged : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
create : [ "POST /repos/{owner}/{repo}/pulls" ] ,
createComment : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" ] ,
createReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
createReviewCommentReply : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" ] ,
createReviewRequest : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
deleteComment : [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
deletePendingReview : [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
deleteReviewRequest : [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
dismissReview : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" ] ,
get : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
getComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
getCommentsForReview : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ] ,
getReview : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
list : [ "GET /repos/{owner}/{repo}/pulls" ] ,
listComments : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ] ,
listCommentsForRepo : [ "GET /repos/{owner}/{repo}/pulls/comments" ] ,
listCommits : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits" ] ,
listFiles : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files" ] ,
listReviewRequests : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
listReviews : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
merge : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
submitReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" ] ,
update : [ "PATCH /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
updateBranch : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "lydian" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
updateComment : [ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
updateReview : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ]
} ,
rateLimit : {
get : [ "GET /rate_limit" ]
} ,
reactions : {
createForCommitComment : [ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
createForIssue : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
createForIssueComment : [ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
createForPullRequestReviewComment : [ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} ] ,
createForTeamDiscussionCommentInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
createForTeamDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
delete : [ "DELETE /reactions/{reaction_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} , {
renamed : [ "reactions" , "deleteLegacy" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
deleteForCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
deleteForIssue : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
deleteForIssueComment : [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
deleteForPullRequestComment : [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
deleteForTeamDiscussion : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
deleteForTeamDiscussionComment : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
deleteLegacy : [ "DELETE /reactions/{reaction_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} , {
deprecated : "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy"
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
listForCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForIssueComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForPullRequestReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForTeamDiscussionCommentInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listForTeamDiscussionInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "squirrel-girl" ]
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} ]
} ,
repos : {
acceptInvitation : [ "PATCH /user/repository_invitations/{invitation_id}" ] ,
addCollaborator : [ "PUT /repos/{owner}/{repo}/collaborators/{username}" ] ,
addDeployKey : [ "POST /repos/{owner}/{repo}/keys" ] ,
addProtectedBranchAdminEnforcement : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
addProtectedBranchAppRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
addProtectedBranchRequiredSignatures : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "zzzax" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
addProtectedBranchRequiredStatusChecksContexts : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
addProtectedBranchTeamRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
addProtectedBranchUserRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
checkCollaborator : [ "GET /repos/{owner}/{repo}/collaborators/{username}" ] ,
checkVulnerabilityAlerts : [ "GET /repos/{owner}/{repo}/vulnerability-alerts" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "dorian" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
compareCommits : [ "GET /repos/{owner}/{repo}/compare/{base}...{head}" ] ,
createCommitComment : [ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" ] ,
createDeployment : [ "POST /repos/{owner}/{repo}/deployments" ] ,
createDeploymentStatus : [ "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ] ,
createDispatchEvent : [ "POST /repos/{owner}/{repo}/dispatches" ] ,
createForAuthenticatedUser : [ "POST /user/repos" ] ,
createFork : [ "POST /repos/{owner}/{repo}/forks" ] ,
createHook : [ "POST /repos/{owner}/{repo}/hooks" ] ,
createInOrg : [ "POST /orgs/{org}/repos" ] ,
createOrUpdateFile : [ "PUT /repos/{owner}/{repo}/contents/{path}" ] ,
createRelease : [ "POST /repos/{owner}/{repo}/releases" ] ,
createStatus : [ "POST /repos/{owner}/{repo}/statuses/{sha}" ] ,
createUsingTemplate : [ "POST /repos/{template_owner}/{template_repo}/generate" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "baptiste" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
declineInvitation : [ "DELETE /user/repository_invitations/{invitation_id}" ] ,
delete : [ "DELETE /repos/{owner}/{repo}" ] ,
deleteCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}" ] ,
deleteDeployment : [ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
deleteDownload : [ "DELETE /repos/{owner}/{repo}/downloads/{download_id}" ] ,
deleteFile : [ "DELETE /repos/{owner}/{repo}/contents/{path}" ] ,
deleteHook : [ "DELETE /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
deleteInvitation : [ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" ] ,
deleteRelease : [ "DELETE /repos/{owner}/{repo}/releases/{release_id}" ] ,
deleteReleaseAsset : [ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
disableAutomatedSecurityFixes : [ "DELETE /repos/{owner}/{repo}/automated-security-fixes" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "london" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
disablePagesSite : [ "DELETE /repos/{owner}/{repo}/pages" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "switcheroo" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
disableVulnerabilityAlerts : [ "DELETE /repos/{owner}/{repo}/vulnerability-alerts" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "dorian" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
enableAutomatedSecurityFixes : [ "PUT /repos/{owner}/{repo}/automated-security-fixes" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "london" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
enablePagesSite : [ "POST /repos/{owner}/{repo}/pages" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "switcheroo" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
enableVulnerabilityAlerts : [ "PUT /repos/{owner}/{repo}/vulnerability-alerts" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "dorian" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
get : [ "GET /repos/{owner}/{repo}" ] ,
getAllTopics : [ "GET /repos/{owner}/{repo}/topics" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "mercy" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
getAppsWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ] ,
getArchiveLink : [ "GET /repos/{owner}/{repo}/{archive_format}/{ref}" ] ,
getBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}" ] ,
getBranchProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
getClones : [ "GET /repos/{owner}/{repo}/traffic/clones" ] ,
getCodeFrequencyStats : [ "GET /repos/{owner}/{repo}/stats/code_frequency" ] ,
getCollaboratorPermissionLevel : [ "GET /repos/{owner}/{repo}/collaborators/{username}/permission" ] ,
getCombinedStatusForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/status" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/commits/{ref}" ] ,
getCommitActivityStats : [ "GET /repos/{owner}/{repo}/stats/commit_activity" ] ,
getCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}" ] ,
getContents : [ "GET /repos/{owner}/{repo}/contents/{path}" ] ,
getContributorsStats : [ "GET /repos/{owner}/{repo}/stats/contributors" ] ,
getDeployKey : [ "GET /repos/{owner}/{repo}/keys/{key_id}" ] ,
getDeployment : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
getDeploymentStatus : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" ] ,
getDownload : [ "GET /repos/{owner}/{repo}/downloads/{download_id}" ] ,
getHook : [ "GET /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
getLatestPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/latest" ] ,
getLatestRelease : [ "GET /repos/{owner}/{repo}/releases/latest" ] ,
getPages : [ "GET /repos/{owner}/{repo}/pages" ] ,
getPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/{build_id}" ] ,
getParticipationStats : [ "GET /repos/{owner}/{repo}/stats/participation" ] ,
getProtectedBranchAdminEnforcement : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
getProtectedBranchPullRequestReviewEnforcement : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
getProtectedBranchRequiredSignatures : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "zzzax" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
getProtectedBranchRequiredStatusChecks : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
getProtectedBranchRestrictions : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ] ,
getPunchCardStats : [ "GET /repos/{owner}/{repo}/stats/punch_card" ] ,
getReadme : [ "GET /repos/{owner}/{repo}/readme" ] ,
getRelease : [ "GET /repos/{owner}/{repo}/releases/{release_id}" ] ,
getReleaseAsset : [ "GET /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
getReleaseByTag : [ "GET /repos/{owner}/{repo}/releases/tags/{tag}" ] ,
getTeamsWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ] ,
getTopPaths : [ "GET /repos/{owner}/{repo}/traffic/popular/paths" ] ,
getTopReferrers : [ "GET /repos/{owner}/{repo}/traffic/popular/referrers" ] ,
getUsersWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ] ,
getViews : [ "GET /repos/{owner}/{repo}/traffic/views" ] ,
list : [ "GET /user/repos" , { } , {
renamed : [ "repos" , "listForAuthenticatedUser" ]
} ] ,
listAssetsForRelease : [ "GET /repos/{owner}/{repo}/releases/{release_id}/assets" ] ,
listBranches : [ "GET /repos/{owner}/{repo}/branches" ] ,
listBranchesForHeadCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "groot" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listCollaborators : [ "GET /repos/{owner}/{repo}/collaborators" ] ,
listCommentsForCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" ] ,
listCommitComments : [ "GET /repos/{owner}/{repo}/comments" ] ,
listCommits : [ "GET /repos/{owner}/{repo}/commits" ] ,
listContributors : [ "GET /repos/{owner}/{repo}/contributors" ] ,
listDeployKeys : [ "GET /repos/{owner}/{repo}/keys" ] ,
listDeploymentStatuses : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ] ,
listDeployments : [ "GET /repos/{owner}/{repo}/deployments" ] ,
listDownloads : [ "GET /repos/{owner}/{repo}/downloads" ] ,
listForAuthenticatedUser : [ "GET /user/repos" ] ,
listForOrg : [ "GET /orgs/{org}/repos" ] ,
listForUser : [ "GET /users/{username}/repos" ] ,
listForks : [ "GET /repos/{owner}/{repo}/forks" ] ,
listHooks : [ "GET /repos/{owner}/{repo}/hooks" ] ,
listInvitations : [ "GET /repos/{owner}/{repo}/invitations" ] ,
listInvitationsForAuthenticatedUser : [ "GET /user/repository_invitations" ] ,
listLanguages : [ "GET /repos/{owner}/{repo}/languages" ] ,
listPagesBuilds : [ "GET /repos/{owner}/{repo}/pages/builds" ] ,
listProtectedBranchRequiredStatusChecksContexts : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ] ,
listPublic : [ "GET /repositories" ] ,
listPullRequestsAssociatedWithCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "groot" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listReleases : [ "GET /repos/{owner}/{repo}/releases" ] ,
listStatusesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/statuses" ] ,
listTags : [ "GET /repos/{owner}/{repo}/tags" ] ,
listTeams : [ "GET /repos/{owner}/{repo}/teams" ] ,
listTopics : [ "GET /repos/{owner}/{repo}/topics" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "mercy" ]
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} , {
renamed : [ "repos" , "getAllTopics" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
merge : [ "POST /repos/{owner}/{repo}/merges" ] ,
pingHook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings" ] ,
removeBranchProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
removeCollaborator : [ "DELETE /repos/{owner}/{repo}/collaborators/{username}" ] ,
removeDeployKey : [ "DELETE /repos/{owner}/{repo}/keys/{key_id}" ] ,
removeProtectedBranchAdminEnforcement : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
removeProtectedBranchAppRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
removeProtectedBranchPullRequestReviewEnforcement : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
removeProtectedBranchRequiredSignatures : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "zzzax" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
removeProtectedBranchRequiredStatusChecks : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
removeProtectedBranchRequiredStatusChecksContexts : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
removeProtectedBranchRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ] ,
removeProtectedBranchTeamRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
removeProtectedBranchUserRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
replaceAllTopics : [ "PUT /repos/{owner}/{repo}/topics" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "mercy" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
replaceProtectedBranchAppRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
replaceProtectedBranchRequiredStatusChecksContexts : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
replaceProtectedBranchTeamRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
replaceProtectedBranchUserRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
replaceTopics : [ "PUT /repos/{owner}/{repo}/topics" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "mercy" ]
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} , {
renamed : [ "repos" , "replaceAllTopics" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
requestPageBuild : [ "POST /repos/{owner}/{repo}/pages/builds" ] ,
retrieveCommunityProfileMetrics : [ "GET /repos/{owner}/{repo}/community/profile" ] ,
testPushHook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests" ] ,
transfer : [ "POST /repos/{owner}/{repo}/transfer" ] ,
update : [ "PATCH /repos/{owner}/{repo}" ] ,
updateBranchProtection : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
updateCommitComment : [ "PATCH /repos/{owner}/{repo}/comments/{comment_id}" ] ,
updateHook : [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
updateInformationAboutPagesSite : [ "PUT /repos/{owner}/{repo}/pages" ] ,
updateInvitation : [ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" ] ,
updateProtectedBranchPullRequestReviewEnforcement : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
updateProtectedBranchRequiredStatusChecks : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
updateRelease : [ "PATCH /repos/{owner}/{repo}/releases/{release_id}" ] ,
updateReleaseAsset : [ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
uploadReleaseAsset : [ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}" , {
baseUrl : "https://uploads.github.com"
2020-06-06 12:12:17 +09:00
} ]
} ,
2020-08-22 14:57:57 +09:00
search : {
code : [ "GET /search/code" ] ,
commits : [ "GET /search/commits" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "cloak" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
issuesAndPullRequests : [ "GET /search/issues" ] ,
labels : [ "GET /search/labels" ] ,
repos : [ "GET /search/repositories" ] ,
topics : [ "GET /search/topics" ] ,
users : [ "GET /search/users" ]
2020-06-06 12:12:17 +09:00
} ,
2020-08-22 14:57:57 +09:00
teams : {
addOrUpdateMembershipForUserInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
addOrUpdateMembershipInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" , { } , {
renamed : [ "teams" , "addOrUpdateMembershipForUserInOrg" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
addOrUpdateProjectInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
2020-08-22 14:57:57 +09:00
} , {
renamed : [ "teams" , "addOrUpdateProjectPermissionsInOrg" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
addOrUpdateProjectPermissionsInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
addOrUpdateRepoInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" , { } , {
renamed : [ "teams" , "addOrUpdateRepoPermissionsInOrg" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
addOrUpdateRepoPermissionsInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
checkManagesRepoInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" , { } , {
renamed : [ "teams" , "checkPermissionsForRepoInOrg" ]
} ] ,
checkPermissionsForProjectInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
checkPermissionsForRepoInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
create : [ "POST /orgs/{org}/teams" ] ,
createDiscussionCommentInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ] ,
createDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions" ] ,
deleteDiscussionCommentInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
deleteDiscussionInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
deleteInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}" ] ,
getByName : [ "GET /orgs/{org}/teams/{team_slug}" ] ,
getDiscussionCommentInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
getDiscussionInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
getMembershipForUserInOrg : [ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
getMembershipInOrg : [ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" , { } , {
renamed : [ "teams" , "getMembershipForUserInOrg" ]
} ] ,
list : [ "GET /orgs/{org}/teams" ] ,
listChildInOrg : [ "GET /orgs/{org}/teams/{team_slug}/teams" ] ,
listDiscussionCommentsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ] ,
listDiscussionsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions" ] ,
listForAuthenticatedUser : [ "GET /user/teams" ] ,
listMembersInOrg : [ "GET /orgs/{org}/teams/{team_slug}/members" ] ,
listPendingInvitationsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/invitations" ] ,
listProjectsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} ] ,
2020-08-22 14:57:57 +09:00
listReposInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos" ] ,
removeMembershipForUserInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
removeMembershipInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" , { } , {
renamed : [ "teams" , "removeMembershipForUserInOrg" ]
} ] ,
removeProjectInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" ] ,
removeRepoInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
reviewProjectInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
2020-06-06 12:12:17 +09:00
mediaType : {
2020-08-22 14:57:57 +09:00
previews : [ "inertia" ]
2020-06-06 12:12:17 +09:00
}
} , {
2020-08-22 14:57:57 +09:00
renamed : [ "teams" , "checkPermissionsForProjectInOrg" ]
2020-06-06 12:12:17 +09:00
} ] ,
2020-08-22 14:57:57 +09:00
updateDiscussionCommentInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
updateDiscussionInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
updateInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}" ]
} ,
users : {
addEmails : [ "POST /user/emails" ] ,
block : [ "PUT /user/blocks/{username}" ] ,
checkBlocked : [ "GET /user/blocks/{username}" ] ,
checkFollowing : [ "GET /user/following/{username}" ] ,
checkFollowingForUser : [ "GET /users/{username}/following/{target_user}" ] ,
createGpgKey : [ "POST /user/gpg_keys" ] ,
createPublicKey : [ "POST /user/keys" ] ,
deleteEmails : [ "DELETE /user/emails" ] ,
deleteGpgKey : [ "DELETE /user/gpg_keys/{gpg_key_id}" ] ,
deletePublicKey : [ "DELETE /user/keys/{key_id}" ] ,
follow : [ "PUT /user/following/{username}" ] ,
getAuthenticated : [ "GET /user" ] ,
getByUsername : [ "GET /users/{username}" ] ,
getContextForUser : [ "GET /users/{username}/hovercard" ] ,
getGpgKey : [ "GET /user/gpg_keys/{gpg_key_id}" ] ,
getPublicKey : [ "GET /user/keys/{key_id}" ] ,
list : [ "GET /users" ] ,
listBlocked : [ "GET /user/blocks" ] ,
listEmails : [ "GET /user/emails" ] ,
listFollowedByAuthenticated : [ "GET /user/following" ] ,
listFollowersForAuthenticatedUser : [ "GET /user/followers" ] ,
listFollowersForUser : [ "GET /users/{username}/followers" ] ,
listFollowingForAuthenticatedUser : [ "GET /user/following" , { } , {
renamed : [ "users" , "listFollowedByAuthenticated" ]
} ] ,
listFollowingForUser : [ "GET /users/{username}/following" ] ,
listGpgKeys : [ "GET /user/gpg_keys" ] ,
listGpgKeysForUser : [ "GET /users/{username}/gpg_keys" ] ,
listPublicEmails : [ "GET /user/public_emails" ] ,
listPublicKeys : [ "GET /user/keys" ] ,
listPublicKeysForUser : [ "GET /users/{username}/keys" ] ,
togglePrimaryEmailVisibility : [ "PATCH /user/email/visibility" ] ,
unblock : [ "DELETE /user/blocks/{username}" ] ,
unfollow : [ "DELETE /user/following/{username}" ] ,
updateAuthenticated : [ "PATCH /user" ]
}
} ;
const VERSION = "3.14.0" ;
/**
 * Build the `octokit.<scope>.<methodName>()` tree from the flat endpoint map.
 * Each entry is `[route, defaults?, decorations?]`; decorated endpoints are
 * wrapped via `decorate()`, plain ones become defaulted request functions.
 */
function endpointsToMethods(octokit, endpointsMap) {
  const methods = {};
  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      const [route, defaults, decorations] = endpoint;
      // Route strings look like "VERB /path/{param}".
      const [method, url] = route.split(/ /);
      const endpointDefaults = Object.assign({ method, url }, defaults);
      const scopeMethods = methods[scope] || (methods[scope] = {});
      scopeMethods[methodName] = decorations
        ? decorate(octokit, scope, methodName, endpointDefaults, decorations)
        : octokit.request.defaults(endpointDefaults);
    }
  }
  return methods;
}
/**
 * Wrap `octokit.request.defaults(defaults)` so that legacy endpoint
 * decorations are honored when the method is invoked:
 *  - `mapToData`: move the named parameter wholesale into the request body
 *  - `renamed`: warn that the method has a new scope/name
 *  - `deprecated`: warn with the given deprecation message
 *  - `renamedParameters`: map old parameter names onto their replacements
 * The returned callable keeps the full surface of the defaulted request.
 */
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);
  function withDecorations(...args) {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args);
    // There are currently no other decorations than `.mapToData`
    if (decorations.mapToData) {
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: undefined
      });
      return requestWithDefaults(options);
    }
    // NOTE: there are currently no deprecations. But we keep the code
    // below for future reference
    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }
    if (decorations.renamedParameters) {
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const mergedOptions = requestWithDefaults.endpoint.merge(...args);
      for (const [oldName, newName] of Object.entries(decorations.renamedParameters)) {
        // There is currently no deprecated parameter that is optional,
        // so we never hit the else branch below at this point.
        /* istanbul ignore else */
        if (oldName in mergedOptions) {
          octokit.log.warn(`"${oldName}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${newName}" instead`);
          if (!(newName in mergedOptions)) {
            mergedOptions[newName] = mergedOptions[oldName];
          }
          delete mergedOptions[oldName];
        }
      }
      return requestWithDefaults(mergedOptions);
    }
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    return requestWithDefaults(...args);
  }
  return Object.assign(withDecorations, requestWithDefaults);
}
/ * *
* This plugin is a 1 : 1 copy of internal @ octokit / rest plugins . The primary
* goal is to rebuild @ octokit / rest on top of @ octokit / core . Once that is
* done , we will remove the registerEndpoints methods and return the methods
* directly as with the other plugins . At that point we will also remove the
* legacy workarounds and deprecations .
*
* See the plan at
* https : //github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
* /
function restEndpointMethods ( octokit ) {
return endpointsToMethods ( octokit , Endpoints ) ;
}
restEndpointMethods . VERSION = VERSION ;
exports . restEndpointMethods = restEndpointMethods ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 48 :
/***/ ( function ( module ) {
"use strict" ;
// See http://www.robvanderwoude.com/escapechars.php
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;

/**
 * Escape a command name for cmd.exe by caret-escaping shell metacharacters.
 */
function escapeCommand(arg) {
    return arg.replace(metaCharsRegExp, '^$1');
}

/**
 * Escape a single argument for cmd.exe. Algorithm based on
 * https://qntm.org/cmd
 */
function escapeArgument(arg, doubleEscapeMetaChars) {
    // Coerce to string first.
    let escaped = `${arg}`;
    // Sequence of backslashes followed by a double quote:
    // double up all the backslashes and escape the double quote.
    escaped = escaped.replace(/(\\*)"/g, '$1$1\\"');
    // Sequence of backslashes at the end of the string (which will become a
    // double quote later): double up all the backslashes.
    escaped = escaped.replace(/(\\*)$/, '$1$1');
    // All other backslashes occur literally. Quote the whole thing, then
    // caret-escape the cmd.exe metacharacters.
    escaped = `"${escaped}"`;
    escaped = escaped.replace(metaCharsRegExp, '^$1');
    // Double escape meta chars if requested (e.g. when the command goes
    // through cmd.exe twice).
    if (doubleEscapeMetaChars) {
        escaped = escaped.replace(metaCharsRegExp, '^$1');
    }
    return escaped;
}
module . exports . command = escapeCommand ;
module . exports . argument = escapeArgument ;
/***/ } ) ,
/***/ 53 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . Context = void 0 ;
const fs _1 = _ _webpack _require _ _ ( 747 ) ;
const os _1 = _ _webpack _require _ _ ( 87 ) ;
/**
 * Workflow run context, hydrated from the environment variables and event
 * payload file that the GitHub Actions runner provides.
 */
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        this.payload = {};
        const eventPath = process.env.GITHUB_EVENT_PATH;
        if (eventPath) {
            if (fs_1.existsSync(eventPath)) {
                this.payload = JSON.parse(fs_1.readFileSync(eventPath, { encoding: 'utf8' }));
            }
            else {
                // Don't fail hard; just report that the payload file is missing.
                process.stdout.write(`GITHUB_EVENT_PATH ${eventPath} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
    }
    // {owner, repo, number} for the issue/PR the event relates to; falls back
    // to `payload.number` when neither `issue` nor `pull_request` is present.
    get issue() {
        const { issue, pull_request: pullRequest } = this.payload;
        return { ...this.repo, number: (issue || pullRequest || this.payload).number };
    }
    // {owner, repo} from GITHUB_REPOSITORY, falling back to the event payload.
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports . Context = Context ;
//# sourceMappingURL=context.js.map
/***/ } ) ,
/***/ 87 :
/***/ ( function ( module ) {
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 101 :
/***/ ( function ( module ) {
"use strict" ;
const isWin = process . platform === 'win32' ;
/**
 * Build an ENOENT error shaped like what spawn() produces on POSIX when the
 * executable cannot be found.
 * @param {{command: string, args: Array}} original - parsed spawn request
 * @param {string} syscall - 'spawn' or 'spawnSync'
 * @returns {Error} error carrying code/errno/syscall/path/spawnargs
 */
function notFoundError(original, syscall) {
    const err = new Error(`${syscall} ${original.command} ENOENT`);
    return Object.assign(err, {
        code: 'ENOENT',
        errno: 'ENOENT',
        syscall: `${syscall} ${original.command}`,
        path: original.command,
        spawnargs: original.args,
    });
}
function hookChildProcess ( cp , parsed ) {
if ( ! isWin ) {
return ;
}
const originalEmit = cp . emit ;
cp . emit = function ( name , arg1 ) {
// If emitting "exit" event and exit code is 1, we need to check if
// the command exists and emit an "error" instead
// See https://github.com/IndigoUnited/node-cross-spawn/issues/16
if ( name === 'exit' ) {
const err = verifyENOENT ( arg1 , parsed , 'spawn' ) ;
if ( err ) {
return originalEmit . call ( cp , 'error' , err ) ;
}
}
return originalEmit . apply ( cp , arguments ) ; // eslint-disable-line prefer-rest-params
} ;
}
// Detect the Windows "exit status 1 + executable never resolved" combination
// that really means ENOENT, and translate it into a spawn ENOENT error.
// Returns null when the exit looks legitimate.
function verifyENOENT(status, parsed) {
    const looksLikeENOENT = isWin && status === 1 && !parsed.file;
    return looksLikeENOENT ? notFoundError(parsed.original, 'spawn') : null;
}
// Synchronous twin of verifyENOENT(): same detection, but the produced error
// is labelled with the 'spawnSync' syscall.
function verifyENOENTSync(status, parsed) {
    const looksLikeENOENT = isWin && status === 1 && !parsed.file;
    return looksLikeENOENT ? notFoundError(parsed.original, 'spawnSync') : null;
}
module . exports = {
hookChildProcess ,
verifyENOENT ,
verifyENOENTSync ,
notFoundError ,
} ;
/***/ } ) ,
/***/ 126 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var fs = __webpack_require__(747)
// Pick the executable-check backend: module 1 on Windows (or under the
// TESTING_WINDOWS test flag), module 728 otherwise.
var core
if (process.platform === 'win32' || global.TESTING_WINDOWS) {
  core = __webpack_require__(1)
} else {
  core = __webpack_require__(728)
}
module.exports = isexe
isexe.sync = sync
/**
 * Asynchronously check whether `path` is executable, delegating to the
 * platform backend (`core`). Supports both callback and Promise styles.
 * EACCES — or any error when `options.ignoreErrors` is set — is reported as
 * "not executable" rather than as an error.
 */
function isexe(path, options, cb) {
  if (typeof options === 'function') {
    cb = options;
    options = {};
  }
  if (!cb) {
    if (typeof Promise !== 'function') {
      throw new TypeError('callback not provided');
    }
    // Promise mode: re-enter with a callback adapter.
    return new Promise(function (resolve, reject) {
      isexe(path, options || {}, function (er, is) {
        if (er) {
          reject(er);
        } else {
          resolve(is);
        }
      });
    });
  }
  core(path, options || {}, function (er, is) {
    // ignore EACCES because that just means we aren't allowed to run it
    if (er && (er.code === 'EACCES' || (options && options.ignoreErrors))) {
      er = null;
      is = false;
    }
    cb(er, is);
  });
}
/**
 * Synchronous variant of isexe(). EACCES — or any error when
 * `options.ignoreErrors` is set — yields false instead of throwing.
 */
function sync(path, options) {
  // my kingdom for a filtered catch
  try {
    return core.sync(path, options || {});
  } catch (er) {
    const swallow = (options && options.ignoreErrors) || er.code === 'EACCES';
    if (!swallow) {
      throw er;
    }
    return false;
  }
}
/***/ } ) ,
/***/ 129 :
/***/ ( function ( module ) {
module . exports = require ( "child_process" ) ;
/***/ } ) ,
/***/ 166 :
/***/ ( function ( module ) {
"use strict" ;
const alias = [ 'stdin' , 'stdout' , 'stderr' ] ;
const hasAlias = opts => alias . some ( x => Boolean ( opts [ x ] ) ) ;
module . exports = opts => {
if ( ! opts ) {
return null ;
}
if ( opts . stdio && hasAlias ( opts ) ) {
throw new Error ( ` It's not possible to provide \` stdio \` in combination with one of ${ alias . map ( x => ` \` ${ x } \` ` ) . join ( ', ' ) } ` ) ;
}
if ( typeof opts . stdio === 'string' ) {
return opts . stdio ;
}
const stdio = opts . stdio || [ ] ;
if ( ! Array . isArray ( stdio ) ) {
throw new TypeError ( ` Expected \` stdio \` to be of type \` string \` or \` Array \` , got \` ${ typeof stdio } \` ` ) ;
}
const result = [ ] ;
const len = Math . max ( stdio . length , alias . length ) ;
for ( let i = 0 ; i < len ; i ++ ) {
let value = null ;
if ( stdio [ i ] !== undefined ) {
value = stdio [ i ] ;
} else if ( opts [ alias [ i ] ] !== undefined ) {
value = opts [ alias [ i ] ] ;
}
result [ i ] = value ;
}
return result ;
} ;
/***/ } ) ,
/***/ 186 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript-generated helper: drives a generator produced from an async
// function, adopting each yielded value into the Promise chain.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-generated helper: emulates `import * as ns` for CommonJS modules
// by copying own properties and exposing the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const command _1 = _ _webpack _require _ _ ( 351 ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
/**
 * The code to exit an action
 */
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const convertedVal = command_1.toCommandValue(val);
    // Make the variable visible to the current process too, not only to
    // subsequent steps via the runner's set-env command.
    process.env[name] = convertedVal;
    command_1.issueCommand('set-env', { name }, convertedVal);
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath the directory to prepend to PATH
 */
function addPath(inputPath) {
    command_1.issueCommand('add-path', {}, inputPath);
    // Update PATH for the current process as well.
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    // Inputs are exposed by the runner as INPUT_<NAME>, with spaces in the
    // input name mapped to underscores.
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const val = process.env[envKey] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return val.trim();
}
exports . getInput = getInput ;
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Gets whether Actions Step Debug is on or not
 */
function isDebug() {
    // The runner sets RUNNER_DEBUG=1 when step debug logging is enabled.
    const flag = process.env['RUNNER_DEBUG'];
    return flag === '1';
}
exports . isDebug = isDebug ;
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 */
function error(message) {
    command_1.issue('error', message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
 * Adds a warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 */
function warning(message) {
    command_1.issue('warning', message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even when fn() throws.
            endGroup();
        }
        return result;
    });
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
/**
 * Gets the value of an state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string
 */
function getState(name) {
    // State is round-tripped through STATE_<name> environment variables.
    const stored = process.env[`STATE_${name}`];
    return stored || '';
}
exports . getState = getState ;
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 193 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
const VERSION = "2.2.1" ;
/ * *
* Some “ list ” response that can be paginated have a different response structure
*
* They have a ` total_count ` key in the response ( search also has ` incomplete_results ` ,
* / i n s t a l l a t i o n / r e p o s i t o r i e s a l s o h a s ` r e p o s i t o r y _ s e l e c t i o n ` ) , a s w e l l a s a k e y w i t h
* the list of the items which name varies from endpoint to endpoint .
*
* Octokit normalizes these responses so that paginated results are always returned following
* the same structure . One challenge is that if the list response has only one page , no Link
* header is provided , so this header alone is not sufficient to check wether a response is
* paginated or not .
*
* We check if a "total_count" key is present in the response data , but also make sure that
* a "url" property is not , as the "Get the combined status for a specific ref" endpoint would
* otherwise match : https : //developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
* /
/**
 * Flatten GitHub's `{ total_count, <items_key>: [...] }` list responses so
 * that `response.data` is always the plain array of items, re-attaching
 * `total_count` (and `incomplete_results` / `repository_selection` when
 * present) onto that array. Responses that also carry a `url` key (e.g. the
 * combined-status endpoint) are returned untouched. Mutates `response`.
 */
function normalizePaginatedListResponse(response) {
  const needsNormalization = "total_count" in response.data && !("url" in response.data);
  if (!needsNormalization) {
    return response;
  }
  // Pull the bookkeeping keys off before locating the items key, which is
  // the single remaining property.
  const { incomplete_results: incompleteResults, repository_selection: repositorySelection, total_count: totalCount } = response.data;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;
  const namespaceKey = Object.keys(response.data)[0];
  response.data = response.data[namespaceKey];
  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }
  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }
  response.data.total_count = totalCount;
  return response;
}
/**
 * Create an async iterator over all pages of a paginated endpoint, following
 * the `Link: <...>; rel="next"` response header until exhausted.
 */
function iterator(octokit, route, parameters) {
  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const { method, headers } = options;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      next() {
        // `url` becomes undefined once the last page's Link header carries
        // no rel="next" entry.
        if (!url) {
          return Promise.resolve({ done: true });
        }
        return requestMethod({ method, url, headers })
          .then(normalizePaginatedListResponse)
          .then(response => {
            // `response.headers.link` format:
            // '<https://api.github.com/...?page=2>; rel="next", <...>; rel="last"'
            url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
            return { value: response };
          });
      }
    })
  };
}
// Collect every page of `route` into one array. Supports the short form
// paginate(octokit, route, mapFn) by shifting arguments.
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = undefined;
  }
  const asyncIterator = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], asyncIterator, mapFn);
}
// Recursively drain `iterator`, concatenating each page's data (or the
// result of `mapFn(response, done)`) into `results`. Calling `done()` from
// the map function stops pagination after the current page.
function gather(octokit, results, iterator, mapFn) {
  return iterator.next().then(result => {
    if (result.done) {
      return results;
    }
    let earlyExit = false;
    const done = () => {
      earlyExit = true;
    };
    const page = mapFn ? mapFn(result.value, done) : result.value.data;
    results = results.concat(page);
    if (earlyExit) {
      return results;
    }
    return gather(octokit, results, iterator, mapFn);
  });
}
/**
 * Plugin entry point: attaches `octokit.paginate` (and
 * `octokit.paginate.iterator`) to an Octokit instance.
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */
function paginateRest(octokit) {
    return {
        paginate: Object.assign(paginate.bind(null, octokit), {
            iterator: iterator.bind(null, octokit)
        })
    };
}
paginateRest.VERSION = VERSION;
exports.paginateRest = paginateRest;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 205 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var once = _ _webpack _require _ _ ( 223 ) ;
// Callback placeholder used when the caller does not provide one.
var noop = function () {};

// An http.ClientRequest look-alike: has setHeader plus an abort() method.
var isRequest = function (candidate) {
  var abortIsFn = typeof candidate.abort === 'function';
  return candidate.setHeader && abortIsFn;
};

// A ChildProcess look-alike: exposes exactly three stdio streams.
var isChildProcess = function (candidate) {
  return candidate.stdio && Array.isArray(candidate.stdio) && candidate.stdio.length === 3;
};
var eos = function ( stream , opts , callback ) {
if ( typeof opts === 'function' ) return eos ( stream , null , opts ) ;
if ( ! opts ) opts = { } ;
callback = once ( callback || noop ) ;
var ws = stream . _writableState ;
var rs = stream . _readableState ;
var readable = opts . readable || ( opts . readable !== false && stream . readable ) ;
var writable = opts . writable || ( opts . writable !== false && stream . writable ) ;
var cancelled = false ;
var onlegacyfinish = function ( ) {
if ( ! stream . writable ) onfinish ( ) ;
} ;
var onfinish = function ( ) {
writable = false ;
if ( ! readable ) callback . call ( stream ) ;
} ;
var onend = function ( ) {
readable = false ;
if ( ! writable ) callback . call ( stream ) ;
} ;
var onexit = function ( exitCode ) {
callback . call ( stream , exitCode ? new Error ( 'exited with error code: ' + exitCode ) : null ) ;
} ;
var onerror = function ( err ) {
callback . call ( stream , err ) ;
} ;
var onclose = function ( ) {
process . nextTick ( onclosenexttick ) ;
} ;
var onclosenexttick = function ( ) {
if ( cancelled ) return ;
if ( readable && ! ( rs && ( rs . ended && ! rs . destroyed ) ) ) return callback . call ( stream , new Error ( 'premature close' ) ) ;
if ( writable && ! ( ws && ( ws . ended && ! ws . destroyed ) ) ) return callback . call ( stream , new Error ( 'premature close' ) ) ;
} ;
var onrequest = function ( ) {
stream . req . on ( 'finish' , onfinish ) ;
} ;
if ( isRequest ( stream ) ) {
stream . on ( 'complete' , onfinish ) ;
stream . on ( 'abort' , onclose ) ;
if ( stream . req ) onrequest ( ) ;
else stream . on ( 'request' , onrequest ) ;
} else if ( writable && ! ws ) { // legacy streams
stream . on ( 'end' , onlegacyfinish ) ;
stream . on ( 'close' , onlegacyfinish ) ;
}
if ( isChildProcess ( stream ) ) stream . on ( 'exit' , onexit ) ;
stream . on ( 'end' , onend ) ;
stream . on ( 'finish' , onfinish ) ;
if ( opts . error !== false ) stream . on ( 'error' , onerror ) ;
stream . on ( 'close' , onclose ) ;
return function ( ) {
cancelled = true ;
stream . removeListener ( 'complete' , onfinish ) ;
stream . removeListener ( 'abort' , onclose ) ;
stream . removeListener ( 'request' , onrequest ) ;
if ( stream . req ) stream . req . removeListener ( 'finish' , onfinish ) ;
stream . removeListener ( 'end' , onlegacyfinish ) ;
stream . removeListener ( 'close' , onlegacyfinish ) ;
stream . removeListener ( 'finish' , onfinish ) ;
stream . removeListener ( 'exit' , onexit ) ;
stream . removeListener ( 'end' , onend ) ;
stream . removeListener ( 'error' , onerror ) ;
stream . removeListener ( 'close' , onclose ) ;
} ;
} ;
module . exports = eos ;
/***/ } ) ,
/***/ 207 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module.exports = which;
which.sync = whichSync;

// Windows-ish environments use ';' as the PATH list separator and honor PATHEXT.
var isWindows = process.platform === 'win32' ||
  process.env.OSTYPE === 'cygwin' ||
  process.env.OSTYPE === 'msys';

var path = __webpack_require__(622);
var COLON = isWindows ? ';' : ':';
var isexe = __webpack_require__(126);
// Build the ENOENT error reported when no executable candidate matched.
function getNotFoundError(cmd) {
  var error = new Error('not found: ' + cmd);
  error.code = 'ENOENT';
  return error;
}
// Compute the PATH directories and extension candidates used to search `cmd`.
function getPathInfo(cmd, opt) {
  var colon = opt.colon || COLON;
  var pathEnv = opt.path || process.env.PATH || '';
  var pathExt = [''];

  pathEnv = pathEnv.split(colon);

  var pathExtExe = '';
  if (isWindows) {
    // On Windows the current directory is always searched first.
    pathEnv.unshift(process.cwd());
    pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM');
    pathExt = pathExtExe.split(colon);

    // Always test the cmd itself first. isexe will check to make sure
    // it's found in the pathExt set.
    if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') pathExt.unshift('');
  }

  // If it has a slash, then we don't bother searching the pathenv.
  // just check the file itself, and that's it.
  if (cmd.match(/\//) || (isWindows && cmd.match(/\\/))) pathEnv = [''];

  return {
    env: pathEnv,
    ext: pathExt,
    extExe: pathExtExe
  };
}
// Asynchronously locate `cmd` on the PATH. Calls cb(err) when nothing matched,
// cb(null, match) for the first hit, or cb(null, matches[]) with opt.all.
function which(cmd, opt, cb) {
  if (typeof opt === 'function') {
    cb = opt;
    opt = {};
  }

  var info = getPathInfo(cmd, opt);
  var dirs = info.env;
  var exts = info.ext;
  var pathExtExe = info.extExe;
  var found = [];

  ;(function nextDir(d, dl) {
    if (d === dl) {
      if (opt.all && found.length) return cb(null, found);
      return cb(getNotFoundError(cmd));
    }

    var dir = dirs[d];
    // PATH entries may be quoted on Windows.
    if (dir.charAt(0) === '"' && dir.slice(-1) === '"') dir = dir.slice(1, -1);

    var candidate = path.join(dir, cmd);
    if (!dir && (/^\.[\\\/]/).test(cmd)) {
      // Preserve an explicit "./" or ".\" prefix on relative lookups.
      candidate = cmd.slice(0, 2) + candidate;
    }

    ;(function nextExt(e, el) {
      if (e === el) return nextDir(d + 1, dl);
      var ext = exts[e];
      isexe(candidate + ext, { pathExt: pathExtExe }, function (er, is) {
        if (!er && is) {
          if (opt.all) found.push(candidate + ext);
          else return cb(null, candidate + ext);
        }
        return nextExt(e + 1, el);
      });
    })(0, exts.length);
  })(0, dirs.length);
}
// Synchronous variant of which(). Returns the first match (or every match
// with opt.all), null with opt.nothrow, otherwise throws ENOENT.
function whichSync(cmd, opt) {
  opt = opt || {};

  var info = getPathInfo(cmd, opt);
  var dirs = info.env;
  var exts = info.ext;
  var pathExtExe = info.extExe;
  var found = [];

  for (var d = 0, dl = dirs.length; d < dl; d++) {
    var dir = dirs[d];
    // PATH entries may be quoted on Windows.
    if (dir.charAt(0) === '"' && dir.slice(-1) === '"') dir = dir.slice(1, -1);

    var candidate = path.join(dir, cmd);
    if (!dir && /^\.[\\\/]/.test(cmd)) {
      // Preserve an explicit "./" or ".\" prefix on relative lookups.
      candidate = cmd.slice(0, 2) + candidate;
    }

    for (var e = 0, el = exts.length; e < el; e++) {
      var cur = candidate + exts[e];
      try {
        // isexe.sync may throw (e.g. EACCES); treat that as "no match".
        var is = isexe.sync(cur, { pathExt: pathExtExe });
        if (is) {
          if (opt.all) found.push(cur);
          else return cur;
        }
      } catch (ex) { /* ignore */ }
    }
  }

  if (opt.all && found.length) return found;
  if (opt.nothrow) return null;
  throw getNotFoundError(cmd);
}
/***/ } ) ,
/***/ 211 :
/***/ ( function ( module ) {
// Node.js built-in shim: the bundle defers to the runtime's "https" module.
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 219 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict";

var net = __webpack_require__(631);
var tls = __webpack_require__(16);
var http = __webpack_require__(605);
var https = __webpack_require__(211);
var events = __webpack_require__(614);
var assert = __webpack_require__(357);
var util = __webpack_require__(669);

exports.httpOverHttp = httpOverHttp;
exports.httpsOverHttp = httpsOverHttp;
exports.httpOverHttps = httpOverHttps;
exports.httpsOverHttps = httpsOverHttps;

// The four factories differ only in the transport used to reach the proxy
// (http vs https .request) and whether the tunneled socket is wrapped in TLS.
function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  return agent;
}

function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}

function httpOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  return agent;
}

function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}

// HTTP Agent that issues CONNECT requests through a proxy and pools the
// resulting tunneled sockets, queueing requests past maxSockets.
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = [];
  self.sockets = [];

  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);

TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({ request: req }, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function (socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};

TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // FIX: `new Buffer(string)` is deprecated/unsafe; Buffer.from() is the
    // supported replacement and produces identical output for string input.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
      Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade); // for v0.6
  connectReq.once('connect', onConnect); // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function () {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();
    debug('tunneling socket could not be established, cause=%s\n',
      cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
      'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};

TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var pos = this.sockets.indexOf(socket);
  if (pos === -1) {
    return;
  }
  this.sockets.splice(pos, 1);

  var pending = this.requests.shift();
  if (pending) {
    // If we have pending requests and a socket gets closed a new one
    // needs to be created to take over in the pool for the one that closed.
    this.createSocket(pending, function (socket) {
      pending.request.onSocket(socket);
    });
  }
};

// createSocket variant for https-over-* agents: upgrade the tunneled socket
// to TLS before handing it to the request.
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function (socket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}

function toOptions(host, port, localAddress) {
  if (typeof host === 'string') { // since v0.10
    return {
      host: host,
      port: port,
      localAddress: localAddress
    };
  }
  return host; // for v0.11 or later
}

// Shallow-merge any number of option objects into target, skipping undefined.
function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    if (typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}

var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function () {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  };
} else {
  debug = function () {};
}
exports.debug = debug; // for test
/***/ } ) ,
/***/ 223 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var wrappy = __webpack_require__(940);

module.exports = wrappy(once);
module.exports.strict = wrappy(onceStrict);

// Opt-in prototype helpers: fn.once() / fn.onceStrict().
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this);
    },
    configurable: true
  });

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this);
    },
    configurable: true
  });
});

// Wrap fn so it runs at most once; later calls return the cached first result.
function once(fn) {
  var wrapper = function () {
    if (wrapper.called) return wrapper.value;
    wrapper.called = true;
    wrapper.value = fn.apply(this, arguments);
    return wrapper.value;
  };
  wrapper.called = false;
  return wrapper;
}

// Like once(), but a second invocation throws instead of returning the cache.
function onceStrict(fn) {
  var wrapper = function () {
    if (wrapper.called) throw new Error(wrapper.onceError);
    wrapper.called = true;
    wrapper.value = fn.apply(this, arguments);
    return wrapper.value;
  };
  var name = fn.name || 'Function wrapped with `once`';
  wrapper.onceError = name + " shouldn't be called more than once";
  wrapper.called = false;
  return wrapper;
}
/***/ } ) ,
/***/ 234 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict";

Object.defineProperty(exports, '__esModule', { value: true });

function _interopDefault(ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }

var endpoint = __webpack_require__(440);
var universalUserAgent = __webpack_require__(429);
var isPlainObject = _interopDefault(__webpack_require__(840));
var nodeFetch = _interopDefault(__webpack_require__(467));
var requestError = __webpack_require__(537);

const VERSION = "5.4.4";

function getBufferResponse(response) {
  return response.arrayBuffer();
}

// Execute one HTTP request described by `requestOptions`, normalizing the
// response to { status, url, headers, data } or throwing a RequestError.
function fetchWrapper(requestOptions) {
  if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }

  let headers = {};
  let status;
  let url;
  const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;

  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, requestOptions.request)).then(response => {
    url = response.url;
    status = response.status;

    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }

    if (status === 204 || status === 205) {
      return;
    }

    // GitHub API returns 200 for HEAD requests
    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }
      throw new requestError.RequestError(response.statusText, status, {
        headers,
        request: requestOptions
      });
    }

    if (status === 304) {
      throw new requestError.RequestError("Not modified", status, {
        headers,
        request: requestOptions
      });
    }

    if (status >= 400) {
      return response.text().then(message => {
        const error = new requestError.RequestError(message, status, {
          headers,
          request: requestOptions
        });
        try {
          let responseBody = JSON.parse(error.message);
          Object.assign(error, responseBody);
          let errors = responseBody.errors; // Assumption `errors` would always be in Array format
          error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
        } catch (e) {
          // ignore, see octokit/rest.js#684
        }
        throw error;
      });
    }

    const contentType = response.headers.get("content-type");
    if (/application\/json/.test(contentType)) {
      return response.json();
    }
    if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
      return response.text();
    }
    return getBufferResponse(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    if (error instanceof requestError.RequestError) {
      throw error;
    }
    throw new requestError.RequestError(error.message, 500, {
      headers,
      request: requestOptions
    });
  });
}

// Bind `newDefaults` into the endpoint and expose a request function that
// also carries .endpoint and .defaults().
function withDefaults(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  const newApi = function (route, parameters) {
    const endpointOptions = endpoint.merge(route, parameters);
    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(endpoint.parse(endpointOptions));
    }
    const request = (route, parameters) => {
      return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
    };
    Object.assign(request, {
      endpoint,
      defaults: withDefaults.bind(null, endpoint)
    });
    return endpointOptions.request.hook(request, endpointOptions);
  };

  return Object.assign(newApi, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });
}

const request = withDefaults(endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  }
});

exports.request = request;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 252 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict";

const fs = __webpack_require__(747);
const shebangCommand = __webpack_require__(32);

// Read the first bytes of `command` and parse its shebang line, if any.
function readShebang(command) {
  // 150 bytes is plenty for any realistic "#!interpreter [arg]" line.
  const size = 150;

  let buffer;
  if (Buffer.alloc) {
    // Node.js v4.5+ / v5.10+
    buffer = Buffer.alloc(size);
  } else {
    // Old Node.js API
    buffer = new Buffer(size);
    buffer.fill(0); // zero-fill
  }

  let fd;
  try {
    fd = fs.openSync(command, 'r');
    fs.readSync(fd, buffer, 0, size, 0);
    fs.closeSync(fd);
  } catch (e) { /* best-effort: unreadable files simply yield no shebang */ }

  // Attempt to extract shebang (null is returned if not a shebang)
  return shebangCommand(buffer.toString());
}

module.exports = readShebang;
/***/ } ) ,
/***/ 274 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict";

const path = __webpack_require__(622);
const which = __webpack_require__(207);
const pathKey = __webpack_require__(539)();

// Look `parsed.command` up on the PATH, honoring a custom cwd/env; pass
// `withoutPathExt` to retry without PATHEXT-based matching.
function resolveCommandAttempt(parsed, withoutPathExt) {
  const cwd = process.cwd();
  const hasCustomCwd = parsed.options.cwd != null;

  // If a custom `cwd` was specified, we need to change the process cwd
  // because `which` will do stat calls but does not support a custom cwd
  if (hasCustomCwd) {
    try {
      process.chdir(parsed.options.cwd);
    } catch (err) {
      /* Empty */
    }
  }

  let resolved;
  try {
    resolved = which.sync(parsed.command, {
      path: (parsed.options.env || process.env)[pathKey],
      pathExt: withoutPathExt ? path.delimiter : undefined,
    });
  } catch (e) {
    /* Empty */
  } finally {
    // Always restore the original working directory.
    process.chdir(cwd);
  }

  // If we successfully resolved, ensure that an absolute path is returned
  // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
  if (resolved) {
    resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
  }

  return resolved;
}

function resolveCommand(parsed) {
  return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
}

module.exports = resolveCommand;
/***/ } ) ,
/***/ 294 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Re-export the vendored "tunnel" package (webpack module 219).
module.exports = __webpack_require__(219);
/***/ } ) ,
/***/ 330 :
/***/ ( function ( module ) {
"use strict" ;
module . exports = ( promise , onFinally ) => {
onFinally = onFinally || ( ( ) => { } ) ;
return promise . then (
val => new Promise ( resolve => {
resolve ( onFinally ( ) ) ;
} ) . then ( ( ) => val ) ,
err => new Promise ( resolve => {
resolve ( onFinally ( ) ) ;
} ) . then ( ( ) => {
throw err ;
} )
) ;
} ;
/***/ } ) ,
/***/ 334 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
// Classify the token: three dot-separated parts => JWT ("app"), a "v<N>."
// prefix => installation token, anything else => OAuth token.
async function auth(token) {
  const isJWT = token.split(/\./).length === 3;
  const tokenType = isJWT ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth";
  return {
    type: "token",
    token: token,
    tokenType
  };
}
/**
 * Prefix token for usage in the Authorization header
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
  // FIX: the template literals were whitespace-mangled (` bearer ${ token } `),
  // which would emit stray spaces into the Authorization header value.
  // A JWT (three dot-separated segments) uses the "bearer" scheme.
  if (token.split(/\./).length === 3) {
    return `bearer ${token}`;
  }
  return `token ${token}`;
}
// Request hook: merge route/parameters, inject the Authorization header, send.
async function hook(token, request, route, parameters) {
  const endpoint = request.endpoint.merge(route, parameters);
  endpoint.headers.authorization = withAuthorizationPrefix(token);
  return request(endpoint);
}
// Factory: validate the token, strip any leading "token "/"bearer " prefix,
// and return an auth() function carrying the request hook.
const createTokenAuth = function createTokenAuth(token) {
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }
  if (typeof token !== "string") {
    throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
  }

  token = token.replace(/^(token|bearer) +/i, "");

  return Object.assign(auth.bind(null, token), {
    hook: hook.bind(null, token)
  });
};

exports.createTokenAuth = createTokenAuth;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 341 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var once = __webpack_require__(223);
var eos = __webpack_require__(205);
var fs = __webpack_require__(747); // we only need fs to get the ReadStream and WriteStream prototypes

var noop = function () {};
var ancient = /^v?\.0/.test(process.version);

var isFn = function (fn) {
  return typeof fn === 'function';
};

var isFS = function (stream) {
  if (!ancient) return false; // newer node version do not need to care about fs is a special way
  if (!fs) return false; // browser
  return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close);
};

var isRequest = function (stream) {
  return stream.setHeader && isFn(stream.abort);
};

// Attach end-of-stream detection to `stream` and return a destroyer that
// tears the stream down (at most once) unless it already closed cleanly.
var destroyer = function (stream, reading, writing, callback) {
  callback = once(callback);

  var closed = false;
  stream.on('close', function () {
    closed = true;
  });

  eos(stream, { readable: reading, writable: writing }, function (err) {
    if (err) return callback(err);
    closed = true;
    callback();
  });

  var destroyed = false;
  return function (err) {
    if (closed) return;
    if (destroyed) return;
    destroyed = true;

    if (isFS(stream)) return stream.close(noop); // use close for fs streams to avoid fd leaks
    if (isRequest(stream)) return stream.abort(); // request.destroy just do .end - .abort is what we want
    if (isFn(stream.destroy)) return stream.destroy();

    callback(err || new Error('stream was destroyed'));
  };
};

var call = function (fn) {
  fn();
};

var pipe = function (from, to) {
  return from.pipe(to);
};

// pump(a, b, c, ..., cb?): pipe the streams together and destroy all of them
// if any one closes or errors; cb receives the first error, if any.
var pump = function () {
  var streams = Array.prototype.slice.call(arguments);
  var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop;

  if (Array.isArray(streams[0])) streams = streams[0];
  if (streams.length < 2) throw new Error('pump requires two streams per minimum');

  var error;
  var destroys = streams.map(function (stream, i) {
    var reading = i < streams.length - 1;
    var writing = i > 0;
    return destroyer(stream, reading, writing, function (err) {
      if (!error) error = err;
      if (err) destroys.forEach(call);
      if (reading) return;
      destroys.forEach(call);
      callback(error);
    });
  });

  return streams.reduce(pipe);
};

module.exports = pump;
/***/ } ) ,
/***/ 351 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict";

// TypeScript-emitted interop helper: copy a CJS module's own properties onto
// a fresh namespace object with a "default" binding.
var __importStar = (this && this.__importStar) || function (mod) {
  if (mod && mod.__esModule) return mod;
  var result = {};
  if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
  result["default"] = mod;
  return result;
};

Object.defineProperty(exports, "__esModule", { value: true });

const os = __importStar(__webpack_require__(87));
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
  const cmd = new Command(command, properties, message);
  process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;

function issue(name, message = '') {
  issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';

// FIX: the template literals below were whitespace-mangled
// (` ${ key } = ${ ... } `), which would inject stray spaces into the emitted
// workflow command and break runner-side parsing.
class Command {
  constructor(command, properties, message) {
    if (!command) {
      command = 'missing.command';
    }
    this.command = command;
    this.properties = properties;
    this.message = message;
  }

  // Render "::command key=value,key=value::message" with escaped payloads.
  toString() {
    let cmdStr = CMD_STRING + this.command;

    if (this.properties && Object.keys(this.properties).length > 0) {
      cmdStr += ' ';
      let first = true;
      for (const key in this.properties) {
        if (this.properties.hasOwnProperty(key)) {
          const val = this.properties[key];
          if (val) {
            if (first) {
              first = false;
            } else {
              cmdStr += ',';
            }
            cmdStr += `${key}=${escapeProperty(val)}`;
          }
        }
      }
    }

    cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
    return cmdStr;
  }
}
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
  if (input === null || input === undefined) {
    return '';
  }
  if (typeof input === 'string' || input instanceof String) {
    return input;
  }
  // Anything non-string is serialized as JSON.
  return JSON.stringify(input);
}
exports . toCommandValue = toCommandValue ;
// Escape a command message payload: %, CR and LF must be percent-encoded.
function escapeData(s) {
  const replacements = [[/%/g, '%25'], [/\r/g, '%0D'], [/\n/g, '%0A']];
  let out = toCommandValue(s);
  for (const [pattern, code] of replacements) {
    out = out.replace(pattern, code);
  }
  return out;
}
// Escape a command property value: like escapeData, plus ':' and ','
// which delimit properties within the command string.
function escapeProperty(s) {
  const replacements = [
    [/%/g, '%25'],
    [/\r/g, '%0D'],
    [/\n/g, '%0A'],
    [/:/g, '%3A'],
    [/,/g, '%2C'],
  ];
  let out = toCommandValue(s);
  for (const [pattern, code] of replacements) {
    out = out.replace(pattern, code);
  }
  return out;
}
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 357 :
/***/ ( function ( module ) {
module . exports = require ( "assert" ) ;
/***/ } ) ,
/***/ 413 :
/***/ ( function ( module ) {
module . exports = require ( "stream" ) ;
/***/ } ) ,
/***/ 429 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict";

Object.defineProperty(exports, '__esModule', { value: true });

function _interopDefault(ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }

var osName = _interopDefault(__webpack_require__(824));

// Build the "Node.js/<version> (<os>; <arch>)" User-Agent fragment.
// FIX: the template literal was whitespace-mangled, corrupting the UA string.
// osName() shells out on Windows; its known "wmic" failure mode degrades
// gracefully instead of throwing.
function getUserAgent() {
  try {
    return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
  } catch (error) {
    if (/wmic os get Caption/.test(error.message)) {
      return "Windows <version undetectable>";
    }
    return "<environment undetectable>";
  }
}

exports.getUserAgent = getUserAgent;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 438 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict";

// TypeScript-emitted ESM interop helpers.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  o[k2] = m[k];
}));

var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
  Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
  o["default"] = v;
});

var __importStar = (this && this.__importStar) || function (mod) {
  if (mod && mod.__esModule) return mod;
  var result = {};
  if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
  __setModuleDefault(result, mod);
  return result;
};

Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokit = exports.context = void 0;

const Context = __importStar(__webpack_require__(53));
const utils_1 = __webpack_require__(30);

exports.context = new Context.Context();

/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param token    the repo PAT or GITHUB_TOKEN
 * @param options  other options to set
 */
function getOctokit(token, options) {
  return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map
/***/ } ) ,
/***/ 440 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict";

Object.defineProperty(exports, '__esModule', { value: true });

// Unwrap a CommonJS-interop default export if present.
function _interopDefault(ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }

var isPlainObject = _interopDefault(__webpack_require__(840));
var universalUserAgent = __webpack_require__(429);
// Return a copy of `object` with all keys lower-cased ({} for falsy input).
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }
  const normalized = {};
  for (const key of Object.keys(object)) {
    normalized[key.toLowerCase()] = object[key];
  }
  return normalized;
}
// Recursively merge `options` into a shallow copy of `defaults`: plain-object
// values present in both merge key-by-key, everything else is overwritten.
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);
  for (const key of Object.keys(options)) {
    if (isPlainObject(options[key]) && key in defaults) {
      result[key] = mergeDeep(defaults[key], options[key]);
    } else {
      Object.assign(result, { [key]: options[key] });
    }
  }
  return result;
}
// Merge a route ("METHOD /path" string or options object) with `defaults`.
function merge(defaults, route, options) {
  if (typeof route === "string") {
    let [method, url] = route.split(" ");
    // A bare string without a method is treated as the URL.
    options = Object.assign(url ? { method, url } : { url: method }, options);
  } else {
    options = Object.assign({}, route);
  }

  // lowercase header names before merging with defaults to avoid duplicates
  options.headers = lowercaseKeys(options.headers);

  const mergedOptions = mergeDeep(defaults || {}, options);

  // mediaType.previews arrays are merged, instead of overwritten
  if (defaults && defaults.mediaType.previews.length) {
    mergedOptions.mediaType.previews = defaults.mediaType.previews
      .filter(preview => !mergedOptions.mediaType.previews.includes(preview))
      .concat(mergedOptions.mediaType.previews);
  }

  mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
  return mergedOptions;
}
// Append `parameters` to `url` as a query string, using "?" or "&"
// depending on whether the URL already carries a query.
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);
  if (names.length === 0) {
    return url;
  }
  const separator = url.includes("?") ? "&" : "?";
  const query = names
    .map(name => {
      // The search parameter `q` is encoded per "+"-separated term.
      if (name === "q") {
        return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
      }
      return `${name}=${encodeURIComponent(parameters[name])}`;
    })
    .join("&");
  return url + separator + query;
}
// Matches every {expression} placeholder in a URL template.
const urlVariableRegex = /\{[^}]+\}/g;

// Strip the surrounding braces/operator characters from a matched
// expression and split it into its comma-separated variable names.
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(",");
}

// List every variable name referenced by the template's {…} expressions.
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex) || [];
  const names = [];
  for (const match of matches) {
    for (const name of removeNonChars(match)) {
      names.push(name);
    }
  }
  return names;
}
// Shallow-copy `object`, dropping every key listed in `keysToOmit`.
function omit(object, keysToOmit) {
  const picked = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      picked[key] = object[key];
    }
  }
  return picked;
}
// Based on https://github.com/bramstein/url-template, licensed under BSD
// TODO: create separate package.
//
// Copyright (c) 2012-2014, Bram Stein
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. The name of the author may not be used to endorse or promote products
// derived from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/* istanbul ignore file */
// Percent-encode a string while leaving already-encoded %XX escapes and
// the reserved characters "[" / "]" intact (RFC 6570 "+" / "#" operators).
function encodeReserved(str) {
  const segments = str.split(/(%[0-9A-Fa-f]{2})/g);
  const encoded = segments.map(segment => {
    if (/%[0-9A-Fa-f]/.test(segment)) {
      return segment; // pre-encoded escape: pass through untouched
    }
    return encodeURI(segment).replace(/%5B/g, "[").replace(/%5D/g, "]");
  });
  return encoded.join("");
}
// Like encodeURIComponent, but also escapes ! ' ( ) * so only RFC 3986
// "unreserved" characters survive.
function encodeUnreserved(str) {
  const escapeExtra = c => "%" + c.charCodeAt(0).toString(16).toUpperCase();
  return encodeURIComponent(str).replace(/[!'()*]/g, escapeExtra);
}
// Encode a template value according to its operator ("+"/"#" keep reserved
// characters) and, when `key` is given, prefix it as "key=value".
function encodeValue(operator, value, key) {
  const keepReserved = operator === "+" || operator === "#";
  const encoded = keepReserved ? encodeReserved(value) : encodeUnreserved(value);
  if (!key) {
    return encoded;
  }
  return encodeUnreserved(key) + "=" + encoded;
}
// True for any value except undefined/null.
function isDefined(value) {
  return value != null;
}
// True for operators whose expansion emits "key=value" pairs.
function isKeyOperator(operator) {
  return [";", "&", "?"].includes(operator);
}
// Resolve one RFC 6570 template variable into its encoded fragment(s).
// `operator` is the expression operator ("", "+", "#", ".", "/", ";", "?", "&"),
// `key` the variable name, `modifier` an optional ":n" prefix length or the
// "*" explode flag. Returns an array of encoded string fragments.
// FIX: removed stray git-log timestamp lines that had been pasted into the
// body (e.g. "2020-06-06 12:12:17 +09:00") and were syntax errors.
function getValues(context, operator, key, modifier) {
  var value = context[key],
      result = [];
  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();
      // ":n" prefix modifier truncates scalar values.
      if (modifier && modifier !== "*") {
        value = value.substring(0, parseInt(modifier, 10));
      }
      result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
    } else {
      if (modifier === "*") {
        // Explode: arrays yield one fragment per item, objects one per pair.
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        // No explode: join items / key,value pairs with commas.
        const tmp = [];
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            tmp.push(encodeValue(operator, value));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }
        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // Undefined / empty-string handling depends on the operator (RFC 6570).
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }
  return result;
}
// Wrap a URL template in an object exposing `.expand(context)`.
function parseUrl(template) {
  return {
    expand: context => expand(template, context)
  };
}
// Expand an RFC 6570 URL template against `context`, handling the level-4
// operators + # . / ; ? & plus plain {var} substitution; literal text is
// passed through encodeReserved.
// FIX: removed stray git-log timestamp lines that had been pasted into the
// body and were syntax errors.
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
    if (expression) {
      let operator = "";
      const values = [];
      // A leading operator character selects the expansion style.
      if (operators.indexOf(expression.charAt(0)) !== -1) {
        operator = expression.charAt(0);
        expression = expression.substr(1);
      }
      expression.split(/,/g).forEach(function (variable) {
        // Capture name, optional ":n" prefix modifier, optional "*" explode.
        var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
        values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
      });
      if (operator && operator !== "+") {
        var separator = ",";
        if (operator === "?") {
          separator = "&";
        } else if (operator !== "#") {
          separator = operator;
        }
        return (values.length !== 0 ? operator : "") + values.join(separator);
      } else {
        return values.join(",");
      }
    } else {
      return encodeReserved(literal);
    }
  });
}
// Turn fully-merged endpoint options into a request description:
// { method, url, headers [, body] [, request] }.
// FIX: removed stray git-log timestamp lines pasted into the body (syntax
// errors) and corrected the misspelled local `isBinaryRequset`.
function parse(options) {
  // https://fetch.spec.whatwg.org/#methods
  let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);
  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }
  const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
  if (!isBinaryRequest) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
    }
    if (options.mediaType.previews.length) {
      const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
      headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
        const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
        return `application/vnd.github.${preview}-preview${format}`;
      }).join(",");
    }
  } // for GET/HEAD requests, set URL query parameters from remaining parameters
  // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      } else {
        headers["content-length"] = 0;
      }
    }
  } // default content-type for JSON if body is set
  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow to set `content-length` header, but we can set body to an empty string
  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  } // Only return body/request keys if present
  return Object.assign({
    method,
    url,
    headers
  }, typeof body !== "undefined" ? {
    body
  } : null, options.request ? {
    request: options.request
  } : null);
}
// Merge bound defaults with the route/options, then parse into request form.
function endpointWithDefaults(defaults, route, options) {
  const mergedOptions = merge(defaults, route, options);
  return parse(mergedOptions);
}
// Produce an endpoint function bound to `oldDefaults` merged with
// `newDefaults`, exposing DEFAULTS plus chainable defaults/merge/parse.
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
  endpoint.DEFAULTS = DEFAULTS;
  endpoint.defaults = withDefaults.bind(null, DEFAULTS);
  endpoint.merge = merge.bind(null, DEFAULTS);
  endpoint.parse = parse;
  return endpoint;
}
const VERSION = "6.0.2" ;
const userAgent = ` octokit-endpoint.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } ` ; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.
const DEFAULTS = {
method : "GET" ,
baseUrl : "https://api.github.com" ,
headers : {
accept : "application/vnd.github.v3+json" ,
"user-agent" : userAgent
} ,
mediaType : {
format : "" ,
previews : [ ]
}
} ;
const endpoint = withDefaults ( null , DEFAULTS ) ;
exports . endpoint = endpoint ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 443 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const url = _ _webpack _require _ _ ( 835 ) ;
// Resolve the proxy URL for a request from the standard proxy environment
// variables; returns undefined when no proxy applies or the host is bypassed.
function getProxyUrl(reqUrl) {
    let proxyUrl;
    if (checkBypass(reqUrl)) {
        return proxyUrl;
    }
    const usingSsl = reqUrl.protocol === 'https:';
    const proxyVar = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    if (proxyVar) {
        proxyUrl = url.parse(proxyVar);
    }
    return proxyUrl;
}
exports . getProxyUrl = getProxyUrl ;
// Decide whether `reqUrl` should bypass the proxy, based on the
// comma-separated no_proxy/NO_PROXY environment variable. Matching is
// case-insensitive against "host" and "host:port".
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the effective request port (explicit, else protocol default).
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate forms of the request host to compare against.
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    const noProxyItems = noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x);
    return noProxyItems.some(item => upperReqHosts.includes(item));
}
exports . checkBypass = checkBypass ;
/***/ } ) ,
/***/ 447 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
const path = _ _webpack _require _ _ ( 622 ) ;
const childProcess = _ _webpack _require _ _ ( 129 ) ;
const crossSpawn = _ _webpack _require _ _ ( 746 ) ;
const stripEof = _ _webpack _require _ _ ( 774 ) ;
const npmRunPath = _ _webpack _require _ _ ( 502 ) ;
const isStream = _ _webpack _require _ _ ( 554 ) ;
const _getStream = _ _webpack _require _ _ ( 766 ) ;
const pFinally = _ _webpack _require _ _ ( 330 ) ;
const onExit = _ _webpack _require _ _ ( 931 ) ;
const errname = _ _webpack _require _ _ ( 689 ) ;
const stdio = _ _webpack _require _ _ ( 166 ) ;
const TEN _MEGABYTES = 1000 * 1000 * 10 ;
// Normalize (cmd, args, opts) into the spawn parameters execa uses: merges
// option defaults, resolves the command via cross-spawn (unless a Windows
// shell was requested), computes stdio and the local-binary PATH.
function handleArgs(cmd, args, opts) {
  let parsed;
  opts = Object.assign({
    extendEnv: true,
    env: {}
  }, opts);
  if (opts.extendEnv) {
    // Start from the parent environment, then overlay user-provided vars.
    opts.env = Object.assign({}, process.env, opts.env);
  }
  if (opts.__winShell === true) {
    // Internal flag set by handleShell() on Windows: skip cross-spawn parsing.
    delete opts.__winShell;
    parsed = {
      command: cmd,
      args,
      options: opts,
      file: cmd,
      original: {
        cmd,
        args
      }
    };
  } else {
    parsed = crossSpawn._parse(cmd, args, opts);
  }
  // Layer execa's defaults under whatever cross-spawn produced.
  opts = Object.assign({
    maxBuffer: TEN_MEGABYTES,
    buffer: true,
    stripEof: true,
    preferLocal: true,
    localDir: parsed.options.cwd || process.cwd(),
    encoding: 'utf8',
    reject: true,
    cleanup: true
  }, parsed.options);
  opts.stdio = stdio(opts);
  if (opts.preferLocal) {
    // Put locally installed node_modules/.bin binaries on PATH.
    opts.env = npmRunPath.env(Object.assign({}, opts, { cwd: opts.localDir }));
  }
  if (opts.detached) {
    // #115
    opts.cleanup = false;
  }
  if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') {
    // #116
    parsed.args.unshift('/q');
  }
  return {
    cmd: parsed.command,
    args: parsed.args,
    opts,
    parsed
  };
}
// Feed the `input` option to the child's stdin: pipe it when it is a
// stream, otherwise write it and close stdin. No-op when input is absent.
function handleInput(spawned, input) {
  if (input === undefined || input === null) {
    return;
  }
  const stdin = spawned.stdin;
  if (isStream(input)) {
    input.pipe(stdin);
    return;
  }
  stdin.end(input);
}
// Post-process captured output: strip the trailing newline when the
// `stripEof` option is enabled and the value is non-empty.
function handleOutput(opts, val) {
  const shouldStrip = Boolean(val) && Boolean(opts.stripEof);
  return shouldStrip ? stripEof(val) : val;
}
// Run `cmd` through the system shell by delegating to `fn` (the async or
// sync runner) with shell-appropriate file/args: /bin/sh -c on POSIX,
// %comspec% /s /c "…" on Windows. A `shell` option overrides the shell path.
function handleShell(fn, cmd, opts) {
  const options = Object.assign({}, opts);
  let file = '/bin/sh';
  let args = ['-c', cmd];
  if (process.platform === 'win32') {
    options.__winShell = true; // tells handleArgs to skip cross-spawn parsing
    file = process.env.comspec || 'cmd.exe';
    args = ['/s', '/c', `"${cmd}"`];
    options.windowsVerbatimArguments = true;
  }
  if (options.shell) {
    file = options.shell;
    delete options.shell;
  }
  return fn(file, args, options);
}
function getStream ( process , stream , { encoding , buffer , maxBuffer } ) {
if ( ! process [ stream ] ) {
return null ;
}
let ret ;
if ( ! buffer ) {
// TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10
ret = new Promise ( ( resolve , reject ) => {
process [ stream ]
. once ( 'end' , resolve )
. once ( 'error' , reject ) ;
} ) ;
} else if ( encoding ) {
ret = _getStream ( process [ stream ] , {
encoding ,
maxBuffer
} ) ;
} else {
ret = _getStream . buffer ( process [ stream ] , { maxBuffer } ) ;
}
return ret . catch ( err => {
err . stream = stream ;
err . message = ` ${ stream } ${ err . message } ` ;
throw err ;
} ) ;
}
function makeError ( result , options ) {
const { stdout , stderr } = result ;
let err = result . error ;
const { code , signal } = result ;
const { parsed , joinedCmd } = options ;
const timedOut = options . timedOut || false ;
if ( ! err ) {
let output = '' ;
if ( Array . isArray ( parsed . opts . stdio ) ) {
if ( parsed . opts . stdio [ 2 ] !== 'inherit' ) {
output += output . length > 0 ? stderr : ` \n ${ stderr } ` ;
}
if ( parsed . opts . stdio [ 1 ] !== 'inherit' ) {
output += ` \n ${ stdout } ` ;
}
} else if ( parsed . opts . stdio !== 'inherit' ) {
output = ` \n ${ stderr } ${ stdout } ` ;
}
err = new Error ( ` Command failed: ${ joinedCmd } ${ output } ` ) ;
err . code = code < 0 ? errname ( code ) : code ;
}
err . stdout = stdout ;
err . stderr = stderr ;
err . failed = true ;
err . signal = signal || null ;
err . cmd = joinedCmd ;
err . timedOut = timedOut ;
return err ;
}
// Render the command and its arguments as a single display string.
function joinCmd(cmd, args) {
  const hasArgs = Array.isArray(args) && args.length > 0;
  return hasArgs ? `${cmd} ${args.join(' ')}` : cmd;
}
module . exports = ( cmd , args , opts ) => {
const parsed = handleArgs ( cmd , args , opts ) ;
const { encoding , buffer , maxBuffer } = parsed . opts ;
const joinedCmd = joinCmd ( cmd , args ) ;
let spawned ;
try {
spawned = childProcess . spawn ( parsed . cmd , parsed . args , parsed . opts ) ;
} catch ( err ) {
return Promise . reject ( err ) ;
}
let removeExitHandler ;
if ( parsed . opts . cleanup ) {
removeExitHandler = onExit ( ( ) => {
spawned . kill ( ) ;
} ) ;
}
let timeoutId = null ;
let timedOut = false ;
const cleanup = ( ) => {
if ( timeoutId ) {
clearTimeout ( timeoutId ) ;
timeoutId = null ;
}
if ( removeExitHandler ) {
removeExitHandler ( ) ;
}
} ;
if ( parsed . opts . timeout > 0 ) {
timeoutId = setTimeout ( ( ) => {
timeoutId = null ;
timedOut = true ;
spawned . kill ( parsed . opts . killSignal ) ;
} , parsed . opts . timeout ) ;
}
const processDone = new Promise ( resolve => {
spawned . on ( 'exit' , ( code , signal ) => {
cleanup ( ) ;
resolve ( { code , signal } ) ;
} ) ;
spawned . on ( 'error' , err => {
cleanup ( ) ;
resolve ( { error : err } ) ;
} ) ;
if ( spawned . stdin ) {
spawned . stdin . on ( 'error' , err => {
cleanup ( ) ;
resolve ( { error : err } ) ;
} ) ;
}
} ) ;
function destroy ( ) {
if ( spawned . stdout ) {
spawned . stdout . destroy ( ) ;
}
if ( spawned . stderr ) {
spawned . stderr . destroy ( ) ;
}
}
const handlePromise = ( ) => pFinally ( Promise . all ( [
processDone ,
getStream ( spawned , 'stdout' , { encoding , buffer , maxBuffer } ) ,
getStream ( spawned , 'stderr' , { encoding , buffer , maxBuffer } )
] ) . then ( arr => {
const result = arr [ 0 ] ;
result . stdout = arr [ 1 ] ;
result . stderr = arr [ 2 ] ;
if ( result . error || result . code !== 0 || result . signal !== null ) {
const err = makeError ( result , {
joinedCmd ,
parsed ,
timedOut
} ) ;
// TODO: missing some timeout logic for killed
// https://github.com/nodejs/node/blob/master/lib/child_process.js#L203
// err.killed = spawned.killed || killed;
err . killed = err . killed || spawned . killed ;
if ( ! parsed . opts . reject ) {
return err ;
}
throw err ;
}
return {
stdout : handleOutput ( parsed . opts , result . stdout ) ,
stderr : handleOutput ( parsed . opts , result . stderr ) ,
code : 0 ,
failed : false ,
killed : false ,
signal : null ,
cmd : joinedCmd ,
timedOut : false
} ;
} ) , destroy ) ;
crossSpawn . _enoent . hookChildProcess ( spawned , parsed . parsed ) ;
handleInput ( spawned , parsed . opts . input ) ;
spawned . then = ( onfulfilled , onrejected ) => handlePromise ( ) . then ( onfulfilled , onrejected ) ;
spawned . catch = onrejected => handlePromise ( ) . catch ( onrejected ) ;
return spawned ;
} ;
// TODO: set `stderr: 'ignore'` when that option is implemented
module . exports . stdout = ( ... args ) => module . exports ( ... args ) . then ( x => x . stdout ) ;
// TODO: set `stdout: 'ignore'` when that option is implemented
module . exports . stderr = ( ... args ) => module . exports ( ... args ) . then ( x => x . stderr ) ;
module . exports . shell = ( cmd , opts ) => handleShell ( module . exports , cmd , opts ) ;
module . exports . sync = ( cmd , args , opts ) => {
const parsed = handleArgs ( cmd , args , opts ) ;
const joinedCmd = joinCmd ( cmd , args ) ;
if ( isStream ( parsed . opts . input ) ) {
throw new TypeError ( 'The `input` option cannot be a stream in sync mode' ) ;
}
const result = childProcess . spawnSync ( parsed . cmd , parsed . args , parsed . opts ) ;
result . code = result . status ;
if ( result . error || result . status !== 0 || result . signal !== null ) {
const err = makeError ( result , {
joinedCmd ,
parsed
} ) ;
if ( ! parsed . opts . reject ) {
return err ;
}
throw err ;
}
return {
stdout : handleOutput ( parsed . opts , result . stdout ) ,
stderr : handleOutput ( parsed . opts , result . stderr ) ,
code : 0 ,
failed : false ,
signal : null ,
cmd : joinedCmd ,
timedOut : false
} ;
} ;
module . exports . shellSync = ( cmd , opts ) => handleShell ( module . exports . sync , cmd , opts ) ;
/***/ } ) ,
/***/ 467 :
/***/ ( function ( module , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var Stream = _interopDefault ( _ _webpack _require _ _ ( 413 ) ) ;
var http = _interopDefault ( _ _webpack _require _ _ ( 605 ) ) ;
var Url = _interopDefault ( _ _webpack _require _ _ ( 835 ) ) ;
var https = _interopDefault ( _ _webpack _require _ _ ( 211 ) ) ;
var zlib = _interopDefault ( _ _webpack _require _ _ ( 761 ) ) ;
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
// fix for "Readable" isn't a named export issue
const Readable = Stream . Readable ;
const BUFFER = Symbol ( 'buffer' ) ;
const TYPE = Symbol ( 'type' ) ;
class Blob {
constructor ( ) {
this [ TYPE ] = '' ;
const blobParts = arguments [ 0 ] ;
const options = arguments [ 1 ] ;
const buffers = [ ] ;
let size = 0 ;
if ( blobParts ) {
const a = blobParts ;
const length = Number ( a . length ) ;
for ( let i = 0 ; i < length ; i ++ ) {
const element = a [ i ] ;
let buffer ;
if ( element instanceof Buffer ) {
buffer = element ;
} else if ( ArrayBuffer . isView ( element ) ) {
buffer = Buffer . from ( element . buffer , element . byteOffset , element . byteLength ) ;
} else if ( element instanceof ArrayBuffer ) {
buffer = Buffer . from ( element ) ;
} else if ( element instanceof Blob ) {
buffer = element [ BUFFER ] ;
} else {
buffer = Buffer . from ( typeof element === 'string' ? element : String ( element ) ) ;
}
size += buffer . length ;
buffers . push ( buffer ) ;
}
}
this [ BUFFER ] = Buffer . concat ( buffers ) ;
let type = options && options . type !== undefined && String ( options . type ) . toLowerCase ( ) ;
if ( type && ! /[^\u0020-\u007E]/ . test ( type ) ) {
this [ TYPE ] = type ;
}
}
get size ( ) {
return this [ BUFFER ] . length ;
}
get type ( ) {
return this [ TYPE ] ;
}
text ( ) {
return Promise . resolve ( this [ BUFFER ] . toString ( ) ) ;
}
arrayBuffer ( ) {
const buf = this [ BUFFER ] ;
const ab = buf . buffer . slice ( buf . byteOffset , buf . byteOffset + buf . byteLength ) ;
return Promise . resolve ( ab ) ;
}
stream ( ) {
const readable = new Readable ( ) ;
readable . _read = function ( ) { } ;
readable . push ( this [ BUFFER ] ) ;
readable . push ( null ) ;
return readable ;
}
toString ( ) {
return '[object Blob]' ;
}
slice ( ) {
const size = this . size ;
const start = arguments [ 0 ] ;
const end = arguments [ 1 ] ;
let relativeStart , relativeEnd ;
if ( start === undefined ) {
relativeStart = 0 ;
} else if ( start < 0 ) {
relativeStart = Math . max ( size + start , 0 ) ;
} else {
relativeStart = Math . min ( start , size ) ;
}
if ( end === undefined ) {
relativeEnd = size ;
} else if ( end < 0 ) {
relativeEnd = Math . max ( size + end , 0 ) ;
} else {
relativeEnd = Math . min ( end , size ) ;
}
const span = Math . max ( relativeEnd - relativeStart , 0 ) ;
const buffer = this [ BUFFER ] ;
const slicedBuffer = buffer . slice ( relativeStart , relativeStart + span ) ;
const blob = new Blob ( [ ] , { type : arguments [ 2 ] } ) ;
blob [ BUFFER ] = slicedBuffer ;
return blob ;
}
}
Object . defineProperties ( Blob . prototype , {
size : { enumerable : true } ,
type : { enumerable : true } ,
slice : { enumerable : true }
} ) ;
Object . defineProperty ( Blob . prototype , Symbol . toStringTag , {
value : 'Blob' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
/ * *
* fetch - error . js
*
* FetchError interface for operational errors
* /
/ * *
* Create FetchError instance
*
* @ param String message Error message for human
* @ param String type Error type for machine
* @ param String systemError For Node . js system error
* @ return FetchError
* /
function FetchError(message, type, systemError) {
  Error.call(this, message);
  this.message = message;
  this.type = type;
  // when err.type is `system`, err.code contains system error code
  if (systemError) {
    this.code = this.errno = systemError.code;
  }
  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}
FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
// Optional charset support for textConverted(): `convert` stays undefined
// unless the optional `encoding` package is installed.
let convert;
try {
  convert = __webpack_require__(877).convert;
} catch (e) {}
const INTERNALS = Symbol('Body internals');
// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
/ * *
* Body mixin
*
* Ref : https : //fetch.spec.whatwg.org/#body
*
* @ param Stream body Readable stream
* @ param Object opts Response options
* @ return Void
* /
function Body ( body ) {
var _this = this ;
var _ref = arguments . length > 1 && arguments [ 1 ] !== undefined ? arguments [ 1 ] : { } ,
_ref$size = _ref . size ;
let size = _ref$size === undefined ? 0 : _ref$size ;
var _ref$timeout = _ref . timeout ;
let timeout = _ref$timeout === undefined ? 0 : _ref$timeout ;
if ( body == null ) {
// body is undefined or null
body = null ;
} else if ( isURLSearchParams ( body ) ) {
// body is a URLSearchParams
body = Buffer . from ( body . toString ( ) ) ;
} else if ( isBlob ( body ) ) ; else if ( Buffer . isBuffer ( body ) ) ; else if ( Object . prototype . toString . call ( body ) === '[object ArrayBuffer]' ) {
// body is ArrayBuffer
body = Buffer . from ( body ) ;
} else if ( ArrayBuffer . isView ( body ) ) {
// body is ArrayBufferView
body = Buffer . from ( body . buffer , body . byteOffset , body . byteLength ) ;
} else if ( body instanceof Stream ) ; else {
// none of the above
// coerce to string then buffer
body = Buffer . from ( String ( body ) ) ;
}
this [ INTERNALS ] = {
body ,
disturbed : false ,
error : null
} ;
this . size = size ;
this . timeout = timeout ;
if ( body instanceof Stream ) {
body . on ( 'error' , function ( err ) {
const error = err . name === 'AbortError' ? err : new FetchError ( ` Invalid response body while trying to fetch ${ _this . url } : ${ err . message } ` , 'system' , err ) ;
_this [ INTERNALS ] . error = error ;
} ) ;
}
}
// Body-consumption accessors/methods shared via Body.mixIn; each decoder
// consumes the body at most once (see consumeBody).
Body.prototype = {
  get body() {
    return this[INTERNALS].body;
  },
  get bodyUsed() {
    return this[INTERNALS].disturbed;
  },
  /**
   * Decode response as ArrayBuffer
   *
   * @return  Promise
   */
  arrayBuffer() {
    return consumeBody.call(this).then(function (buf) {
      return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    });
  },
  /**
   * Return raw response as Blob
   *
   * @return Promise
   */
  blob() {
    let ct = this.headers && this.headers.get('content-type') || '';
    return consumeBody.call(this).then(function (buf) {
      return Object.assign(
      // Prevent copying
      new Blob([], {
        type: ct.toLowerCase()
      }), {
        [BUFFER]: buf
      });
    });
  },
  /**
   * Decode response as json
   *
   * @return  Promise
   */
  json() {
    var _this2 = this;
    return consumeBody.call(this).then(function (buffer) {
      try {
        return JSON.parse(buffer.toString());
      } catch (err) {
        return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
      }
    });
  },
  /**
   * Decode response as text
   *
   * @return  Promise
   */
  text() {
    return consumeBody.call(this).then(function (buffer) {
      return buffer.toString();
    });
  },
  /**
   * Decode response as buffer (non-spec api)
   *
   * @return  Promise
   */
  buffer() {
    return consumeBody.call(this);
  },
  /**
   * Decode response as text, while automatically detecting the encoding and
   * trying to decode to UTF-8 (non-spec api)
   *
   * @return  Promise
   */
  textConverted() {
    var _this3 = this;
    return consumeBody.call(this).then(function (buffer) {
      return convertBody(buffer, _this3.headers);
    });
  }
};
// In browsers, all properties are enumerable.
Object . defineProperties ( Body . prototype , {
body : { enumerable : true } ,
bodyUsed : { enumerable : true } ,
arrayBuffer : { enumerable : true } ,
blob : { enumerable : true } ,
json : { enumerable : true } ,
text : { enumerable : true }
} ) ;
Body . mixIn = function ( proto ) {
for ( const name of Object . getOwnPropertyNames ( Body . prototype ) ) {
// istanbul ignore else: future proof
if ( ! ( name in proto ) ) {
const desc = Object . getOwnPropertyDescriptor ( Body . prototype , name ) ;
Object . defineProperty ( proto , name , desc ) ;
}
}
} ;
/**
 * Consume and convert an entire Body to a Buffer.
 *
 * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
 *
 * @return Promise  Resolves to a Buffer, rejects with TypeError/FetchError
 */
function consumeBody() {
  var _this4 = this;

  // a body may only be consumed once (spec: "disturbed")
  if (this[INTERNALS].disturbed) {
    return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
  }
  this[INTERNALS].disturbed = true;
  // surface any error recorded while the body streamed in
  if (this[INTERNALS].error) {
    return Body.Promise.reject(this[INTERNALS].error);
  }
  let body = this.body;
  // body is null
  if (body === null) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }
  // body is blob: unwrap to its underlying stream
  if (isBlob(body)) {
    body = body.stream();
  }
  // body is buffer
  if (Buffer.isBuffer(body)) {
    return Body.Promise.resolve(body);
  }
  // istanbul ignore if: should never happen
  if (!(body instanceof Stream)) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }
  // body is stream
  // get ready to actually consume the body
  let accum = [];
  let accumBytes = 0;
  let abort = false;
  return new Body.Promise(function (resolve, reject) {
    let resTimeout;
    // allow timeout on slow response body
    if (_this4.timeout) {
      resTimeout = setTimeout(function () {
        abort = true;
        reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
      }, _this4.timeout);
    }
    // handle stream errors
    body.on('error', function (err) {
      if (err.name === 'AbortError') {
        // if the request was aborted, reject with this Error
        abort = true;
        reject(err);
      } else {
        // other errors, such as incorrect content-encoding
        reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
      }
    });
    body.on('data', function (chunk) {
      // once aborted, drop all further chunks on the floor
      if (abort || chunk === null) {
        return;
      }
      // enforce the optional size limit before accumulating
      if (_this4.size && accumBytes + chunk.length > _this4.size) {
        abort = true;
        reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
        return;
      }
      accumBytes += chunk.length;
      accum.push(chunk);
    });
    body.on('end', function () {
      if (abort) {
        return;
      }
      clearTimeout(resTimeout);
      try {
        resolve(Buffer.concat(accum, accumBytes));
      } catch (err) {
        // handle streams that have accumulated too much data (issue #414)
        reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
      }
    });
  });
}
/**
 * Detect buffer encoding and convert to target encoding
 * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
 *
 * @param Buffer buffer Incoming buffer
 * @param Headers headers Response headers (read for `content-type` charset)
 * @return String  Body decoded to a UTF-8 string
 * @throws Error when the optional `encoding` package is not installed
 */
function convertBody(buffer, headers) {
  // `convert` is provided by the optional `encoding` dependency
  if (typeof convert !== 'function') {
    throw new Error('The package `encoding` must be installed to use the textConverted() function');
  }

  const ct = headers.get('content-type');
  let charset = 'utf-8';
  let res, str;

  // 1. charset from the Content-Type header
  if (ct) {
    res = /charset=([^;]*)/i.exec(ct);
  }

  // no charset in content type, peek at response body for at most 1024 bytes
  str = buffer.slice(0, 1024).toString();

  // 2. html5 <meta charset=...>
  if (!res && str) {
    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
  }

  // 3. html4 <meta http-equiv="content-type" ...>
  if (!res && str) {
    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
    if (!res) {
      // attribute order reversed: content before http-equiv
      res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
      if (res) {
        res.pop(); // drop last quote
      }
    }

    if (res) {
      res = /charset=(.*)/i.exec(res.pop());
    }
  }

  // 4. xml declaration <?xml ... encoding=...?>
  if (!res && str) {
    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
  }

  // found charset
  if (res) {
    charset = res.pop();

    // prevent decode issues when sites use incorrect encoding
    // ref: https://hsivonen.fi/encoding-menu/
    if (charset === 'gb2312' || charset === 'gbk') {
      charset = 'gb18030';
    }
  }

  // turn raw buffers into a single utf-8 buffer
  return convert(buffer, 'UTF-8', charset).toString();
}
/**
 * Detect a URLSearchParams object
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param Object obj Object to detect by type or brand
 * @return Boolean
 */
function isURLSearchParams(obj) {
  // Duck-typing as a necessary condition: all core mutator/accessor
  // methods must be present.
  if (typeof obj !== 'object') {
    return false;
  }
  const required = ['append', 'delete', 'get', 'getAll', 'has', 'set'];
  for (const method of required) {
    if (typeof obj[method] !== 'function') {
      return false;
    }
  }

  // Brand-checking and more duck-typing as optional condition.
  return (
    obj.constructor.name === 'URLSearchParams' ||
    Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
    typeof obj.sort === 'function'
  );
}
/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
 * @param {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
  const brand = /^(Blob|File)$/;
  return (
    typeof obj === 'object' &&
    typeof obj.arrayBuffer === 'function' &&
    typeof obj.type === 'string' &&
    typeof obj.stream === 'function' &&
    typeof obj.constructor === 'function' &&
    typeof obj.constructor.name === 'string' &&
    brand.test(obj.constructor.name) &&
    brand.test(obj[Symbol.toStringTag])
  );
}
/**
 * Clone body given Res/Req instance
 *
 * @param Mixed instance Response or Request instance
 * @return Mixed  The (possibly teed) body for the new clone
 */
function clone(instance) {
  let p1, p2;
  let body = instance.body;

  // don't allow cloning a used body
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }

  // check that body is a stream and not form-data object
  // note: we can't clone the form-data object without having it as a dependency
  if (body instanceof Stream && typeof body.getBoundary !== 'function') {
    // tee instance body: both the original instance and the clone get a
    // PassThrough fed by the same source stream
    p1 = new PassThrough();
    p2 = new PassThrough();
    body.pipe(p1);
    body.pipe(p2);
    // set instance body to teed body and return the other teed body
    instance[INTERNALS].body = p1;
    body = p2;
  }

  return body;
}
/**
 * Performs the operation "extract a `Content-Type` value from |object|" as
 * specified in the specification:
 * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
 *
 * This function assumes that instance.body is present.
 *
 * @param Mixed body Any options.body input
 * @return String|null  Content-Type value, or null when none applies
 */
function extractContentType(body) {
  // body is null
  if (body === null) {
    return null;
  }
  // body is string
  if (typeof body === 'string') {
    return 'text/plain;charset=UTF-8';
  }
  // body is a URLSearchParams
  if (isURLSearchParams(body)) {
    return 'application/x-www-form-urlencoded;charset=UTF-8';
  }
  // body is blob
  if (isBlob(body)) {
    return body.type || null;
  }
  // buffer / ArrayBuffer / view: no implicit content type
  if (Buffer.isBuffer(body)) {
    return null;
  }
  if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    return null;
  }
  if (ArrayBuffer.isView(body)) {
    return null;
  }
  // detect form data input from form-data module
  if (typeof body.getBoundary === 'function') {
    return `multipart/form-data;boundary=${body.getBoundary()}`;
  }
  // body is stream: can't really do much about this
  if (body instanceof Stream) {
    return null;
  }
  // Body constructor defaults other things to string
  return 'text/plain;charset=UTF-8';
}
/**
 * The Fetch Standard treats this as if "total bytes" is a property on the body.
 * For us, we have to explicitly get it with a function.
 *
 * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
 *
 * @param Body instance Instance of Body
 * @return Number|null  Number of bytes, or null if not possible
 */
function getTotalBytes(instance) {
  const body = instance.body;

  // body is null
  if (body === null) {
    return 0;
  }
  // body is blob
  if (isBlob(body)) {
    return body.size;
  }
  // body is buffer
  if (Buffer.isBuffer(body)) {
    return body.length;
  }
  // detect form data input from form-data module
  if (body && typeof body.getLengthSync === 'function') {
    const lengthKnown =
      (body._lengthRetrievers && body._lengthRetrievers.length === 0) || // form-data 1.x
      (body.hasKnownLength && body.hasKnownLength()); // form-data 2.x
    return lengthKnown ? body.getLengthSync() : null;
  }
  // body is stream: length unknowable up front
  return null;
}
/**
 * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
 *
 * @param Writable dest Destination stream
 * @param Body instance Instance of Body
 * @return Void
 */
function writeToStream(dest, instance) {
  const body = instance.body;

  if (body === null) {
    // body is null: nothing to send, just finish the stream
    dest.end();
    return;
  }
  if (isBlob(body)) {
    // body is blob: stream its contents through
    body.stream().pipe(dest);
    return;
  }
  if (Buffer.isBuffer(body)) {
    // body is buffer: single write then finish
    dest.write(body);
    dest.end();
    return;
  }
  // body is stream
  body.pipe(dest);
}
// expose Promise
Body . Promise = global . Promise ;
/**
 * headers.js
 *
 * Headers class offers convenient helpers
 */
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;

// Throw unless `name` (stringified) is a non-empty legal HTTP header token.
function validateName(name) {
  const token = `${name}`;
  if (token === '' || invalidTokenRegex.test(token)) {
    throw new TypeError(`${token} is not a legal HTTP header name`);
  }
}

// Throw if `value` (stringified) contains characters illegal in a header value.
function validateValue(value) {
  const fieldValue = `${value}`;
  if (invalidHeaderCharRegex.test(fieldValue)) {
    throw new TypeError(`${fieldValue} is not a legal HTTP header value`);
  }
}
/**
 * Find the key in the map object given a header name.
 *
 * Returns undefined if not found.
 *
 * @param String name Header name
 * @return String|Undefined
 */
function find(map, name) {
  const lowered = name.toLowerCase();
  // for...in (not Object.keys) to match the original enumeration behavior
  for (const key in map) {
    if (key.toLowerCase() === lowered) {
      return key;
    }
  }
  return undefined;
}
const MAP = Symbol('map');
class Headers {
  /**
   * Headers class
   *
   * @param Object headers Response headers (Headers, iterable of pairs, or plain record)
   * @return Void
   */
  constructor() {
    let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;

    // backing store: prototype-less object of original-cased name -> [values]
    this[MAP] = Object.create(null);

    if (init instanceof Headers) {
      const rawHeaders = init.raw();
      const headerNames = Object.keys(rawHeaders);

      for (const headerName of headerNames) {
        for (const value of rawHeaders[headerName]) {
          this.append(headerName, value);
        }
      }

      return;
    }

    // We don't worry about converting prop to ByteString here as append()
    // will handle it.
    if (init == null) ; else if (typeof init === 'object') {
      const method = init[Symbol.iterator];
      if (method != null) {
        if (typeof method !== 'function') {
          throw new TypeError('Header pairs must be iterable');
        }

        // sequence<sequence<ByteString>>
        // Note: per spec we have to first exhaust the lists then process them
        const pairs = [];
        for (const pair of init) {
          if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
            throw new TypeError('Each header pair must be iterable');
          }
          pairs.push(Array.from(pair));
        }

        for (const pair of pairs) {
          if (pair.length !== 2) {
            throw new TypeError('Each header pair must be a name/value tuple');
          }
          this.append(pair[0], pair[1]);
        }
      } else {
        // record<ByteString, ByteString>
        for (const key of Object.keys(init)) {
          const value = init[key];
          this.append(key, value);
        }
      }
    } else {
      throw new TypeError('Provided initializer must be an object');
    }
  }

  /**
   * Return combined header value given name
   *
   * @param String name Header name
   * @return Mixed  Comma-joined values, or null when absent
   */
  get(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key === undefined) {
      return null;
    }

    return this[MAP][key].join(', ');
  }

  /**
   * Iterate over all headers
   *
   * @param Function callback Executed for each item with parameters (value, name, thisArg)
   * @param Boolean thisArg `this` context for callback function
   * @return Void
   */
  forEach(callback) {
    let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

    let pairs = getHeaders(this);
    let i = 0;
    while (i < pairs.length) {
      var _pairs$i = pairs[i];
      const name = _pairs$i[0],
            value = _pairs$i[1];

      callback.call(thisArg, value, name, this);
      // re-snapshot after each call in case the callback mutated the headers
      pairs = getHeaders(this);
      i++;
    }
  }

  /**
   * Overwrite header values given name
   *
   * @param String name Header name
   * @param String value Header value
   * @return Void
   */
  set(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    // keep the existing key's original casing when present
    this[MAP][key !== undefined ? key : name] = [value];
  }

  /**
   * Append a value onto existing header
   *
   * @param String name Header name
   * @param String value Header value
   * @return Void
   */
  append(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      this[MAP][key].push(value);
    } else {
      this[MAP][name] = [value];
    }
  }

  /**
   * Check for header name existence
   *
   * @param String name Header name
   * @return Boolean
   */
  has(name) {
    name = `${name}`;
    validateName(name);
    return find(this[MAP], name) !== undefined;
  }

  /**
   * Delete all header values given name
   *
   * @param String name Header name
   * @return Void
   */
  delete(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      delete this[MAP][key];
    }
  }

  /**
   * Return raw headers (non-spec api)
   *
   * @return Object  The internal name -> [values] map
   */
  raw() {
    return this[MAP];
  }

  /**
   * Get an iterator on keys.
   *
   * @return Iterator
   */
  keys() {
    return createHeadersIterator(this, 'key');
  }

  /**
   * Get an iterator on values.
   *
   * @return Iterator
   */
  values() {
    return createHeadersIterator(this, 'value');
  }

  /**
   * Get an iterator on entries.
   *
   * This is the default iterator of the Headers object.
   *
   * @return Iterator
   */
  [Symbol.iterator]() {
    return createHeadersIterator(this, 'key+value');
  }
}
Headers . prototype . entries = Headers . prototype [ Symbol . iterator ] ;
Object . defineProperty ( Headers . prototype , Symbol . toStringTag , {
value : 'Headers' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
Object . defineProperties ( Headers . prototype , {
get : { enumerable : true } ,
forEach : { enumerable : true } ,
set : { enumerable : true } ,
append : { enumerable : true } ,
has : { enumerable : true } ,
delete : { enumerable : true } ,
keys : { enumerable : true } ,
values : { enumerable : true } ,
entries : { enumerable : true }
} ) ;
// Snapshot the headers as a sorted array of keys, joined values, or
// [key, value] pairs depending on `kind` ('key' | 'value' | 'key+value').
function getHeaders(headers) {
  const kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
  const sortedKeys = Object.keys(headers[MAP]).sort();

  let project;
  if (kind === 'key') {
    project = function (k) {
      return k.toLowerCase();
    };
  } else if (kind === 'value') {
    project = function (k) {
      return headers[MAP][k].join(', ');
    };
  } else {
    project = function (k) {
      return [k.toLowerCase(), headers[MAP][k].join(', ')];
    };
  }

  return sortedKeys.map(project);
}
const INTERNAL = Symbol('internal');

// Build a HeadersIterator over `target`, yielding items of the given `kind`
// ('key' | 'value' | 'key+value'), starting at index 0.
function createHeadersIterator(target, kind) {
  const iterator = Object.create(HeadersIteratorPrototype);
  iterator[INTERNAL] = { target, kind, index: 0 };
  return iterator;
}
const HeadersIteratorPrototype = Object . setPrototypeOf ( {
next ( ) {
// istanbul ignore if
if ( ! this || Object . getPrototypeOf ( this ) !== HeadersIteratorPrototype ) {
throw new TypeError ( 'Value of `this` is not a HeadersIterator' ) ;
}
var _INTERNAL = this [ INTERNAL ] ;
const target = _INTERNAL . target ,
kind = _INTERNAL . kind ,
index = _INTERNAL . index ;
const values = getHeaders ( target , kind ) ;
const len = values . length ;
if ( index >= len ) {
return {
value : undefined ,
done : true
} ;
}
this [ INTERNAL ] . index = index + 1 ;
return {
value : values [ index ] ,
done : false
} ;
}
} , Object . getPrototypeOf ( Object . getPrototypeOf ( [ ] [ Symbol . iterator ] ( ) ) ) ) ;
Object . defineProperty ( HeadersIteratorPrototype , Symbol . toStringTag , {
value : 'HeadersIterator' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
/**
 * Export the Headers object in a form that Node.js can consume.
 *
 * @param Headers headers
 * @return Object  Prototype-less name -> values object
 */
function exportNodeCompatibleHeaders(headers) {
  // shallow copy into a prototype-less object
  const obj = Object.assign({ __proto__: null }, headers[MAP]);

  // http.request() only supports string as Host header. This hack makes
  // specifying custom Host header possible.
  const hostHeaderKey = find(headers[MAP], 'Host');
  if (hostHeaderKey !== undefined) {
    const hostValues = obj[hostHeaderKey];
    obj[hostHeaderKey] = hostValues[0];
  }

  return obj;
}
/**
 * Create a Headers object from an object of headers, ignoring those that do
 * not conform to HTTP grammar productions.
 *
 * @param Object obj Object of headers
 * @return Headers
 */
function createHeadersLenient(obj) {
  const headers = new Headers();
  for (const name of Object.keys(obj)) {
    // silently skip names that are not legal HTTP tokens
    if (invalidTokenRegex.test(name)) {
      continue;
    }
    if (Array.isArray(obj[name])) {
      for (const val of obj[name]) {
        // silently skip values containing illegal characters
        if (invalidHeaderCharRegex.test(val)) {
          continue;
        }
        // write directly into the backing map to avoid append()'s
        // throwing validation
        if (headers[MAP][name] === undefined) {
          headers[MAP][name] = [val];
        } else {
          headers[MAP][name].push(val);
        }
      }
    } else if (!invalidHeaderCharRegex.test(obj[name])) {
      headers[MAP][name] = [obj[name]];
    }
  }
  return headers;
}
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;

/**
 * Response class
 *
 * @param Stream body Readable stream
 * @param Object opts Response options
 * @return Void
 */
class Response {
  constructor() {
    let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
    let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    Body.call(this, body, opts);

    const status = opts.status || 200;
    const headers = new Headers(opts.headers);

    // infer Content-Type from the body when none was provided
    if (body != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(body);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    this[INTERNALS$1] = {
      url: opts.url,
      status,
      // default statusText to Node's standard reason phrase for the code
      statusText: opts.statusText || STATUS_CODES[status],
      headers,
      counter: opts.counter
    };
  }

  get url() {
    return this[INTERNALS$1].url || '';
  }

  get status() {
    return this[INTERNALS$1].status;
  }

  /**
   * Convenience property representing if the request ended normally
   */
  get ok() {
    return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
  }

  get redirected() {
    return this[INTERNALS$1].counter > 0;
  }

  get statusText() {
    return this[INTERNALS$1].statusText;
  }

  get headers() {
    return this[INTERNALS$1].headers;
  }

  /**
   * Clone this response
   *
   * @return Response
   */
  clone() {
    return new Response(clone(this), {
      url: this.url,
      status: this.status,
      statusText: this.statusText,
      headers: this.headers,
      ok: this.ok,
      redirected: this.redirected
    });
  }
}

Body.mixIn(Response.prototype);

Object.defineProperties(Response.prototype, {
  url: { enumerable: true },
  status: { enumerable: true },
  ok: { enumerable: true },
  redirected: { enumerable: true },
  statusText: { enumerable: true },
  headers: { enumerable: true },
  clone: { enumerable: true }
});

Object.defineProperty(Response.prototype, Symbol.toStringTag, {
  value: 'Response',
  writable: false,
  enumerable: false,
  configurable: true
});
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// feature-detect stream.Readable#destroy (used to gate abort-of-stream support)
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;

/**
 * Check if a value is an instance of Request.
 *
 * @param Mixed input
 * @return Boolean
 */
function isRequest(input) {
  return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
}

// Brand check: accept any object whose prototype's constructor is named
// AbortSignal (so signals from other AbortController implementations work).
function isAbortSignal(signal) {
  const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
  return !!(proto && proto.constructor.name === 'AbortSignal');
}

/**
 * Request class
 *
 * @param Mixed input Url or Request instance
 * @param Object init Custom options
 * @return Void
 */
class Request {
  constructor(input) {
    let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    let parsedURL;

    // normalize input
    if (!isRequest(input)) {
      if (input && input.href) {
        // in order to support Node.js' Url objects; though WHATWG's URL objects
        // will fall into this branch also (since their `toString()` will return
        // `href` property anyway)
        parsedURL = parse_url(input.href);
      } else {
        // coerce input to a string before attempting to parse
        parsedURL = parse_url(`${input}`);
      }
      input = {};
    } else {
      parsedURL = parse_url(input.url);
    }

    let method = init.method || input.method || 'GET';
    method = method.toUpperCase();

    // spec: GET/HEAD requests must not carry a body
    if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
      throw new TypeError('Request with GET/HEAD method cannot have body');
    }

    // init.body wins; otherwise tee the source request's body via clone()
    let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;

    Body.call(this, inputBody, {
      timeout: init.timeout || input.timeout || 0,
      size: init.size || input.size || 0
    });

    const headers = new Headers(init.headers || input.headers || {});

    // infer Content-Type from the body when none was provided
    if (inputBody != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(inputBody);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    let signal = isRequest(input) ? input.signal : null;
    if ('signal' in init) signal = init.signal;

    if (signal != null && !isAbortSignal(signal)) {
      throw new TypeError('Expected signal to be an instanceof AbortSignal');
    }

    this[INTERNALS$2] = {
      method,
      redirect: init.redirect || input.redirect || 'follow',
      headers,
      parsedURL,
      signal
    };

    // node-fetch-only options
    this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
    this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
    this.counter = init.counter || input.counter || 0;
    this.agent = init.agent || input.agent;
  }

  get method() {
    return this[INTERNALS$2].method;
  }

  get url() {
    return format_url(this[INTERNALS$2].parsedURL);
  }

  get headers() {
    return this[INTERNALS$2].headers;
  }

  get redirect() {
    return this[INTERNALS$2].redirect;
  }

  get signal() {
    return this[INTERNALS$2].signal;
  }

  /**
   * Clone this request
   *
   * @return Request
   */
  clone() {
    return new Request(this);
  }
}

Body.mixIn(Request.prototype);

Object.defineProperty(Request.prototype, Symbol.toStringTag, {
  value: 'Request',
  writable: false,
  enumerable: false,
  configurable: true
});

Object.defineProperties(Request.prototype, {
  method: { enumerable: true },
  url: { enumerable: true },
  headers: { enumerable: true },
  redirect: { enumerable: true },
  clone: { enumerable: true },
  signal: { enumerable: true }
});
/**
 * Convert a Request to Node.js http request options.
 *
 * @param Request A Request instance
 * @return Object The options object to be passed to http.request
 */
function getNodeRequestOptions(request) {
  const parsedURL = request[INTERNALS$2].parsedURL;
  // copy so the request's own headers are not mutated by the edits below
  const headers = new Headers(request[INTERNALS$2].headers);

  // fetch step 1.3
  if (!headers.has('Accept')) {
    headers.set('Accept', '*/*');
  }

  // Basic fetch
  if (!parsedURL.protocol || !parsedURL.hostname) {
    throw new TypeError('Only absolute URLs are supported');
  }

  if (!/^https?:$/.test(parsedURL.protocol)) {
    throw new TypeError('Only HTTP(S) protocols are supported');
  }

  if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
    throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
  }

  // HTTP-network-or-cache fetch steps 2.4-2.7
  let contentLengthValue = null;
  if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
    contentLengthValue = '0';
  }
  if (request.body != null) {
    const totalBytes = getTotalBytes(request);
    // getTotalBytes returns null for streams of unknown length
    if (typeof totalBytes === 'number') {
      contentLengthValue = String(totalBytes);
    }
  }
  if (contentLengthValue) {
    headers.set('Content-Length', contentLengthValue);
  }

  // HTTP-network-or-cache fetch step 2.11
  if (!headers.has('User-Agent')) {
    headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
  }

  // HTTP-network-or-cache fetch step 2.15
  if (request.compress && !headers.has('Accept-Encoding')) {
    headers.set('Accept-Encoding', 'gzip,deflate');
  }

  let agent = request.agent;
  if (typeof agent === 'function') {
    agent = agent(parsedURL);
  }

  if (!headers.has('Connection') && !agent) {
    headers.set('Connection', 'close');
  }

  // HTTP-network fetch step 4.2
  // chunked encoding is handled by Node.js
  return Object.assign({}, parsedURL, {
    method: request.method,
    headers: exportNodeCompatibleHeaders(headers),
    agent
  });
}
/**
 * abort-error.js
 *
 * AbortError interface for cancelled requests
 */

/**
 * Create AbortError instance
 *
 * @param String message Error message for human
 * @return AbortError
 */
function AbortError(message) {
  Error.call(this, message);

  this.type = 'aborted';
  this.message = message;

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

// ES5-style Error subclass wiring
AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';

// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
/**
 * Fetch function
 *
 * @param Mixed url Absolute url or Request instance
 * @param Object opts Fetch options
 * @return Promise  Resolves to a Response
 * @throws Error when fetch.Promise has been unset and no replacement provided
 */
function fetch(url, opts) {
  // allow custom promise
  if (!fetch.Promise) {
    throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
  }

  Body.Promise = fetch.Promise;

  // wrap http.request into fetch
  return new fetch.Promise(function (resolve, reject) {
    // build request object
    const request = new Request(url, opts);
    const options = getNodeRequestOptions(request);

    const send = (options.protocol === 'https:' ? https : http).request;
    const signal = request.signal;

    let response = null;

    const abort = function abort() {
      let error = new AbortError('The user aborted a request.');
      reject(error);
      if (request.body && request.body instanceof Stream.Readable) {
        request.body.destroy(error);
      }
      if (!response || !response.body) return;
      response.body.emit('error', error);
    };

    // already-aborted signal: fail immediately, never send
    if (signal && signal.aborted) {
      abort();
      return;
    }

    const abortAndFinalize = function abortAndFinalize() {
      abort();
      finalize();
    };

    // send request
    const req = send(options);
    let reqTimeout;

    if (signal) {
      signal.addEventListener('abort', abortAndFinalize);
    }

    // tear down the in-flight request, its abort listener and timeout
    function finalize() {
      req.abort();
      if (signal) signal.removeEventListener('abort', abortAndFinalize);
      clearTimeout(reqTimeout);
    }

    if (request.timeout) {
      req.once('socket', function (socket) {
        reqTimeout = setTimeout(function () {
          reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
          finalize();
        }, request.timeout);
      });
    }

    req.on('error', function (err) {
      reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
      finalize();
    });

    req.on('response', function (res) {
      clearTimeout(reqTimeout);

      const headers = createHeadersLenient(res.headers);

      // HTTP fetch step 5
      if (fetch.isRedirect(res.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get('Location');

        // HTTP fetch step 5.3
        const locationURL = location === null ? null : resolve_url(request.url, location);

        // HTTP fetch step 5.5
        switch (request.redirect) {
          case 'error':
            reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
            finalize();
            return;
          case 'manual':
            // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
            if (locationURL !== null) {
              // handle corrupted header
              try {
                headers.set('Location', locationURL);
              } catch (err) {
                // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
                reject(err);
              }
            }
            break;
          case 'follow':
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break;
            }

            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 6 (counter increment)

            // Create a new Request object.
            const requestOpts = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: request.body,
              signal: request.signal,
              timeout: request.timeout,
              size: request.size
            };

            // HTTP-redirect fetch step 9
            if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
              reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 11
            if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
              requestOpts.method = 'GET';
              requestOpts.body = undefined;
              requestOpts.headers.delete('content-length');
            }

            // HTTP-redirect fetch step 15
            resolve(fetch(new Request(locationURL, requestOpts)));
            finalize();
            return;
        }
      }

      // prepare response
      res.once('end', function () {
        if (signal) signal.removeEventListener('abort', abortAndFinalize);
      });
      let body = res.pipe(new PassThrough$1());

      const response_options = {
        url: request.url,
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: headers,
        size: request.size,
        timeout: request.timeout,
        counter: request.counter
      };

      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get('Content-Encoding');

      // HTTP-network fetch step 12.1.1.4: handle content codings

      // in following scenarios we ignore compression support
      // 1. compression support is disabled
      // 2. HEAD request
      // 3. no Content-Encoding header
      // 4. no content response (204)
      // 5. content not modified response (304)
      if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // For Node v6+
      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted
      // by common browsers.
      // Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.Z_SYNC_FLUSH,
        finishFlush: zlib.Z_SYNC_FLUSH
      };

      // for gzip
      if (codings == 'gzip' || codings == 'x-gzip') {
        body = body.pipe(zlib.createGunzip(zlibOptions));
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // for deflate
      if (codings == 'deflate' || codings == 'x-deflate') {
        // handle the infamous raw deflate response from old servers
        // a hack for old IIS and Apache servers
        const raw = res.pipe(new PassThrough$1());
        raw.once('data', function (chunk) {
          // see http://stackoverflow.com/questions/37519828
          if ((chunk[0] & 0x0F) === 0x08) {
            body = body.pipe(zlib.createInflate());
          } else {
            body = body.pipe(zlib.createInflateRaw());
          }
          response = new Response(body, response_options);
          resolve(response);
        });
        return;
      }

      // for br
      if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
        body = body.pipe(zlib.createBrotliDecompress());
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // otherwise, use response as-is
      response = new Response(body, response_options);
      resolve(response);
    });

    writeToStream(req, request);
  });
}
/ * *
* Redirect code matching
*
* @ param Number code Status code
* @ return Boolean
* /
// Determine whether an HTTP status code is one of the redirect codes
// that fetch will follow (301, 302, 303, 307, 308).
fetch.isRedirect = function (code) {
	switch (code) {
		case 301:
		case 302:
		case 303:
		case 307:
		case 308:
			return true;
		default:
			return false;
	}
};
// expose Promise
fetch . Promise = global . Promise ;
module . exports = exports = fetch ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . default = exports ;
exports . Headers = Headers ;
exports . Request = Request ;
exports . Response = Response ;
exports . FetchError = FetchError ;
/***/ } ) ,
/***/ 481 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
/**
 * Error subclass used to signal that a deprecated API was called.
 * Carries the deprecation text as the error message and reports its
 * `name` as "Deprecation".
 */
class Deprecation extends Error {
  constructor(message) {
    super(message);

    // Maintains proper stack trace (only available on V8).
    /* istanbul ignore next */
    if (typeof Error.captureStackTrace === 'function') {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}
exports . Deprecation = Deprecation ;
/***/ } ) ,
/***/ 493 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
const os = _ _webpack _require _ _ ( 87 ) ;
// Map from Darwin kernel major version to the macOS marketing name.
// Darwin 20+ corresponds to macOS 11 (Big Sur) and later, where Apple
// switched from the 10.x scheme to yearly major versions.
const nameMap = new Map([
	[22, 'Ventura'],
	[21, 'Monterey'],
	[20, 'Big Sur'],
	[19, 'Catalina'],
	[18, 'Mojave'],
	[17, 'High Sierra'],
	[16, 'Sierra'],
	[15, 'El Capitan'],
	[14, 'Yosemite'],
	[13, 'Mavericks'],
	[12, 'Mountain Lion'],
	[11, 'Lion'],
	[10, 'Snow Leopard'],
	[9, 'Leopard'],
	[8, 'Tiger'],
	[7, 'Panther'],
	[6, 'Jaguar'],
	[5, 'Puma']
]);

/**
 * Resolve the macOS marketing name and version for a Darwin kernel release.
 *
 * @param {string} [release] - Darwin kernel release string (e.g. "18.2.0").
 *   Defaults to `os.release()` of the current machine.
 * @returns {{name: (string|undefined), version: string}} Marketing name
 *   (`undefined` for unknown kernel majors) and macOS version string.
 */
const macosRelease = release => {
	release = Number((release || os.release()).split('.')[0]);

	// Darwin 5-19 map to Mac OS X / macOS 10.(major - 4); Darwin 20+
	// map to macOS (major - 9), i.e. Darwin 20 -> 11 (Big Sur).
	const version = release >= 20 ? String(release - 9) : '10.' + (release - 4);

	return {
		name: nameMap.get(release),
		version
	};
};
module . exports = macosRelease ;
// TODO: remove this in the next major version
module . exports . default = macosRelease ;
/***/ } ) ,
/***/ 502 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
const path = _ _webpack _require _ _ ( 622 ) ;
const pathKey = _ _webpack _require _ _ ( 539 ) ;
// Build an augmented PATH string for running locally-installed binaries:
// every `node_modules/.bin` directory from `opts.cwd` up to the
// filesystem root is prepended, followed by the directory of the running
// `node` binary, followed by the original PATH value.
module . exports = opts => {
opts = Object . assign ( {
cwd : process . cwd ( ) ,
path : process . env [ pathKey ( ) ]
} , opts ) ;
let prev ;
let pth = path . resolve ( opts . cwd ) ;
const ret = [ ] ;
// Walk up the directory tree; `path.resolve(pth, '..')` of the root is
// the root itself, so the loop stops once the path no longer changes.
while ( prev !== pth ) {
ret . push ( path . join ( pth , 'node_modules/.bin' ) ) ;
prev = pth ;
pth = path . resolve ( pth , '..' ) ;
}
// ensure the running `node` binary is used
ret . push ( path . dirname ( process . execPath ) ) ;
return ret . concat ( opts . path ) . join ( path . delimiter ) ;
} ;
// Return a copy of `opts.env` whose PATH entry (platform-aware key via
// `pathKey`) is replaced with the augmented PATH built above. The input
// env object is not mutated.
module . exports . env = opts => {
opts = Object . assign ( {
env : process . env
} , opts ) ;
const env = Object . assign ( { } , opts . env ) ;
const path = pathKey ( { env } ) ;
opts . path = env [ path ] ;
env [ path ] = module . exports ( opts ) ;
return env ;
} ;
/***/ } ) ,
/***/ 515 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
const os = _ _webpack _require _ _ ( 87 ) ;
const execa = _ _webpack _require _ _ ( 447 ) ;
// Reference: https://www.gaijin.at/en/lstwinver.php
// Reference: https://www.gaijin.at/en/lstwinver.php
const names = new Map([
	['10.0', '10'],
	['6.3', '8.1'],
	['6.2', '8'],
	['6.1', '7'],
	['6.0', 'Vista'],
	['5.2', 'Server 2003'],
	['5.1', 'XP'],
	['5.0', '2000'],
	['4.9', 'ME'],
	['4.1', '98'],
	['4.0', '95']
]);

/**
 * Map a Windows NT release string (e.g. "6.3.9600") to the marketing
 * name of the Windows version (e.g. "8.1").
 *
 * @param {string} [release] - Release string; defaults to `os.release()`.
 * @returns {string|undefined} Marketing name, possibly `Server <year>`.
 * @throws {Error} When an explicit `release` does not match `n.n`.
 */
const windowsRelease = release => {
	const version = /\d+\.\d/.exec(release || os.release());

	if (release && !version) {
		throw new Error('`release` argument doesn\'t match `n.n`');
	}

	const ver = (version || [])[0];

	// Server 2008/2012/2016/2019 share version numbers with desktop
	// releases, so when describing the machine we are running on, ask the
	// OS for its caption (PowerShell first; `wmic` as a fallback, since
	// `wmic` is obsolete on later Windows 10 builds) and look for a
	// server year in it.
	const ambiguous = ['6.1', '6.2', '6.3', '10.0'];
	if ((!release || release === os.release()) && ambiguous.includes(ver)) {
		let stdout;
		try {
			stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
		} catch (_) {
			stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
		}

		const year = (stdout.match(/2008|2012|2016|2019/) || [])[0];
		if (year) {
			return `Server ${year}`;
		}
	}

	return names.get(ver);
};
module . exports = windowsRelease ;
/***/ } ) ,
/***/ 537 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var deprecation = _ _webpack _require _ _ ( 481 ) ;
var once = _interopDefault ( _ _webpack _require _ _ ( 223 ) ) ;
const logOnce = once ( deprecation => console . warn ( deprecation ) ) ;
/ * *
* Error with extra properties to help with debugging
* /
/**
 * Error with extra properties to help with debugging.
 *
 * Exposes the HTTP `status`, the response `headers` and a redacted copy
 * of the originating request. The legacy `code` property is kept as a
 * deprecated alias of `status`.
 *
 * Fix: stray VCS-timestamp lines that had corrupted the `code` getter
 * (they were not valid JavaScript) are removed; no behavior changed.
 */
class RequestError extends Error {
  constructor(message, statusCode, options) {
    super(message);

    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = "HttpError";
    this.status = statusCode;

    // Deprecated alias of `status`; warns once per process via logOnce.
    Object.defineProperty(this, "code", {
      get() {
        logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }
    });

    this.headers = options.headers || {}; // redact request credentials without mutating original request options

    const requestCopy = Object.assign({}, options.request);

    // Redact the Authorization header value (scheme is kept).
    if (options.request.headers.authorization) {
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    }

    requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
    .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
    // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
    .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");

    this.request = requestCopy;
  }
}
exports . RequestError = RequestError ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 539 :
/***/ ( function ( module ) {
"use strict" ;
module . exports = opts => {
opts = opts || { } ;
const env = opts . env || process . env ;
const platform = opts . platform || process . platform ;
if ( platform !== 'win32' ) {
return 'PATH' ;
}
return Object . keys ( env ) . find ( x => x . toUpperCase ( ) === 'PATH' ) || 'Path' ;
} ;
/***/ } ) ,
/***/ 549 :
/***/ ( function ( module ) {
module . exports = addHook
// Register a hook function under `name`, wrapping it so that it runs
// before, after, or on rejection of the wrapped method depending on
// `kind`. The original (unwrapped) function is stored alongside the
// wrapper so removeHook() can find it again.
function addHook(state, kind, name, hook) {
  const orig = hook;

  if (!state.registry[name]) {
    state.registry[name] = [];
  }

  if (kind === 'before') {
    // Run the hook first, then the method, both with the same options.
    hook = (method, options) =>
      Promise.resolve()
        .then(orig.bind(null, options))
        .then(method.bind(null, options));
  }

  if (kind === 'after') {
    // Run the method, feed its result to the hook, then resolve with
    // the method's (not the hook's) result.
    hook = (method, options) => {
      let result;
      return Promise.resolve()
        .then(method.bind(null, options))
        .then((result_) => {
          result = result_;
          return orig(result, options);
        })
        .then(() => result);
    };
  }

  if (kind === 'error') {
    // Run the method and delegate any rejection to the hook.
    hook = (method, options) =>
      Promise.resolve()
        .then(method.bind(null, options))
        .catch((error) => orig(error, options));
  }

  state.registry[name].push({
    hook: hook,
    orig: orig
  });
}
/***/ } ) ,
/***/ 554 :
/***/ ( function ( module ) {
"use strict" ;
/**
 * Duck-type checks for Node.js streams: a stream is any object with a
 * `pipe` function; the writable/readable/duplex/transform variants also
 * inspect the internal `_write`/`_read`/`_transform` machinery and the
 * corresponding state objects.
 */
const isStream = module.exports = stream =>
	stream !== null &&
	typeof stream === 'object' &&
	typeof stream.pipe === 'function';

isStream.writable = stream =>
	isStream(stream) &&
	stream.writable !== false &&
	typeof stream._write === 'function' &&
	typeof stream._writableState === 'object';

isStream.readable = stream =>
	isStream(stream) &&
	stream.readable !== false &&
	typeof stream._read === 'function' &&
	typeof stream._readableState === 'object';

isStream.duplex = stream =>
	isStream.writable(stream) && isStream.readable(stream);

isStream.transform = stream =>
	isStream.duplex(stream) &&
	typeof stream._transform === 'function' &&
	typeof stream._transformState === 'object';
/***/ } ) ,
/***/ 560 :
/***/ ( function ( module ) {
"use strict" ;
/ * *
* Tries to execute a function and discards any error that occurs .
* @ param { Function } fn - Function that might or might not throw an error .
* @ returns { ? * } Return - value of the function when no error occurred .
* /
module . exports = function ( fn ) {
try { return fn ( ) } catch ( e ) { }
}
/***/ } ) ,
/***/ 585 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
const { PassThrough } = _ _webpack _require _ _ ( 413 ) ;
// Create a PassThrough stream that records everything written to it.
// Depending on `options`:
//  - array: true       -> collect chunks into an array (object mode
//                         unless an encoding/buffer was requested)
//  - encoding: 'buffer'-> collect raw Buffers
//  - otherwise         -> collect strings decoded with `encoding`
//                         (utf8 by default)
// The returned stream exposes getBufferedValue()/getBufferedLength().
module . exports = options => {
options = Object . assign ( { } , options ) ;
const { array } = options ;
let { encoding } = options ;
// 'buffer' is a sentinel meaning "no string decoding, keep Buffers".
const buffer = encoding === 'buffer' ;
let objectMode = false ;
if ( array ) {
objectMode = ! ( encoding || buffer ) ;
} else {
encoding = encoding || 'utf8' ;
}
if ( buffer ) {
encoding = null ;
}
let len = 0 ;
const ret = [ ] ;
const stream = new PassThrough ( { objectMode } ) ;
if ( encoding ) {
stream . setEncoding ( encoding ) ;
}
// Track the total length: number of chunks in object mode, total
// bytes/characters otherwise, so getBufferedLength() stays O(1).
stream . on ( 'data' , chunk => {
ret . push ( chunk ) ;
if ( objectMode ) {
len = ret . length ;
} else {
len += chunk . length ;
}
} ) ;
stream . getBufferedValue = ( ) => {
if ( array ) {
return ret ;
}
return buffer ? Buffer . concat ( ret , len ) : ret . join ( '' ) ;
} ;
stream . getBufferedLength = ( ) => len ;
return stream ;
} ;
/***/ } ) ,
/***/ 605 :
/***/ ( function ( module ) {
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 614 :
/***/ ( function ( module ) {
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 622 :
/***/ ( function ( module ) {
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 631 :
/***/ ( function ( module ) {
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 638 :
/***/ ( function ( module ) {
"use strict" ;
module . exports = /^#!.*/ ;
/***/ } ) ,
/***/ 668 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
var request = _ _webpack _require _ _ ( 234 ) ;
var universalUserAgent = _ _webpack _require _ _ ( 429 ) ;
const VERSION = "4.5.0" ;
/**
 * Error raised when a GraphQL response contains an `errors` array.
 * The response data is merged onto the error instance and the failing
 * request options are attached as `error.request`.
 */
class GraphqlError extends Error {
  constructor(request, response) {
    super(response.data.errors[0].message);
    Object.assign(this, response.data);
    this.name = "GraphqlError";
    this.request = request; // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}

// Option keys forwarded to the request as-is; every other key is
// treated as a GraphQL variable.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];

/**
 * Send a GraphQL query via `request`, splitting `options` into request
 * options and GraphQL variables. Resolves with `response.data.data`, or
 * rejects with a GraphqlError when the response reports errors.
 */
function graphql(request, query, options) {
  options = typeof query === "string" ? Object.assign({ query }, options) : query;

  const requestOptions = {};
  for (const key of Object.keys(options)) {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      requestOptions[key] = options[key];
    } else {
      if (!requestOptions.variables) {
        requestOptions.variables = {};
      }
      requestOptions.variables[key] = options[key];
    }
  }

  return request(requestOptions).then((response) => {
    if (response.data.errors) {
      throw new GraphqlError(requestOptions, {
        data: response.data
      });
    }
    return response.data.data;
  });
}
// Create a `graphql` function bound to `request$1.defaults(newDefaults)`.
// The returned function also exposes:
//  - defaults(): chainable creation of further-specialized instances
//  - endpoint: the shared endpoint of the outer `request` module
function withDefaults ( request$1 , newDefaults ) {
const newRequest = request$1 . defaults ( newDefaults ) ;
const newApi = ( query , options ) => {
return graphql ( newRequest , query , options ) ;
} ;
return Object . assign ( newApi , {
defaults : withDefaults . bind ( null , newRequest ) ,
endpoint : request . request . endpoint
} ) ;
}
// Default GraphQL client: POSTs to /graphql with a versioned user agent.
// Fix: stray VCS-timestamp lines that had corrupted this object literal
// (they were not valid JavaScript) are removed; no behavior changed.
const graphql$1 = withDefaults(request.request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  },
  method: "POST",
  url: "/graphql"
});
function withCustomRequest ( customRequest ) {
return withDefaults ( customRequest , {
method : "POST" ,
url : "/graphql"
} ) ;
}
exports . graphql = graphql$1 ;
exports . withCustomRequest = withCustomRequest ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 669 :
/***/ ( function ( module ) {
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 670 :
/***/ ( function ( module ) {
module . exports = register
/**
 * Invoke `method(options)` through every hook registered under `name`.
 *
 * When `name` is an array, the hooks are composed right-to-left so that
 * the first name in the array becomes the outermost wrapper. Always
 * returns a Promise (except for the synchronous type check).
 *
 * Fix: stray VCS-timestamp lines that had corrupted the `.then()`
 * callback (they were not valid JavaScript) are removed.
 *
 * @param {object} state - Holds the `registry` of hooks per name.
 * @param {string|string[]} name - Hook name(s) to run through.
 * @param {Function} method - The wrapped operation.
 * @param {object} [options] - Passed to hooks and to `method`.
 */
function register (state, name, method, options) {
  if (typeof method !== 'function') {
    throw new Error('method for before hook must be a function')
  }

  if (!options) {
    options = {}
  }

  if (Array.isArray(name)) {
    return name.reverse().reduce(function (callback, name) {
      return register.bind(null, state, name, callback, options)
    }, method)()
  }

  return Promise.resolve()
    .then(function () {
      if (!state.registry[name]) {
        return method(options)
      }

      return (state.registry[name]).reduce(function (method, registered) {
        return registered.hook.bind(null, method, options)
      }, method)()
    })
}
/***/ } ) ,
/***/ 682 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var register = _ _webpack _require _ _ ( 670 )
var addHook = _ _webpack _require _ _ ( 549 )
var removeHook = _ _webpack _require _ _ ( 819 )
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function . bind
var bindable = bind . bind ( bind )
// Attach the public hook API (before/error/after/wrap registration plus
// remove) onto `hook`, pre-bound to `state` (and to `name` for the
// singular variant).
function bindApi ( hook , state , name ) {
var removeHookRef = bindable ( removeHook , null ) . apply ( null , name ? [ state , name ] : [ state ] )
hook . api = { remove : removeHookRef }
hook . remove = removeHookRef
; [ 'before' , 'error' , 'after' , 'wrap' ] . forEach ( function ( kind ) {
var args = name ? [ state , kind , name ] : [ state , kind ]
hook [ kind ] = hook . api [ kind ] = bindable ( addHook , null ) . apply ( null , args )
} )
}
// Hook bound to a single implicit hook name ('h').
function HookSingular ( ) {
var singularHookName = 'h'
var singularHookState = {
registry : { }
}
var singularHook = register . bind ( null , singularHookState , singularHookName )
bindApi ( singularHook , singularHookState , singularHookName )
return singularHook
}
// Hook managing a collection of named hooks sharing one registry.
function HookCollection ( ) {
var state = {
registry : { }
}
var hook = register . bind ( null , state )
bindApi ( hook , state )
return hook
}
// Legacy entry point: Hook() now means Hook.Collection(); warn once.
var collectionHookDeprecationMessageDisplayed = false
function Hook ( ) {
if ( ! collectionHookDeprecationMessageDisplayed ) {
console . warn ( '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' )
collectionHookDeprecationMessageDisplayed = true
}
return HookCollection ( )
}
Hook . Singular = HookSingular . bind ( )
Hook . Collection = HookCollection . bind ( )
module . exports = Hook
// expose constructors as a named property for TypeScript
module . exports . Hook = Hook
module . exports . Singular = Hook . Singular
module . exports . Collection = Hook . Collection
/***/ } ) ,
/***/ 689 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
// Older versions of Node.js might not have `util.getSystemErrorName()`.
// In that case, fall back to the deprecated internal
// `process.binding('uv')`.
const util = _ _webpack _require _ _ ( 669 ) ;
let uv ;
if ( typeof util . getSystemErrorName === 'function' ) {
module . exports = util . getSystemErrorName ;
} else {
try {
// `process.binding` is deprecated and may throw on newer Node.js.
uv = process . binding ( 'uv' ) ;
if ( typeof uv . errname !== 'function' ) {
throw new TypeError ( 'uv.errname is not a function' ) ;
}
} catch ( err ) {
// Binding unavailable: errname() below degrades to a generic message.
console . error ( 'execa/lib/errname: unable to establish process.binding(\'uv\')' , err ) ;
uv = null ;
}
module . exports = code => errname ( uv , code ) ;
}
// Used for testing the fallback behavior
module . exports . _ _test _ _ = errname ;
/**
 * Resolve a libuv error name for a negative errno `code`.
 * Uses `uv.errname()` when the binding is available; otherwise returns
 * a generic "Unknown system error <code>" string.
 * @throws {Error} If `code` is not negative.
 */
function errname(uv, code) {
	if (uv) {
		return uv.errname(code);
	}

	if (!(code < 0)) {
		throw new Error('err >= 0');
	}

	return `Unknown system error ${code}`;
}
/***/ } ) ,
/***/ 710 :
/***/ ( function ( module ) {
// This is not the set of all possible signals.
//
// It IS, however, the set of all signals that trigger
// an exit on either Linux or BSD systems. Linux is a
// superset of the signal names supported on BSD, and
// the unknown signals just fail to register, so we can
// catch that easily enough.
//
// Don't bother with SIGKILL. It's uncatchable, which
// means that we can't fire any callbacks anyway.
//
// If a user does happen to register a handler on a non-
// fatal signal like SIGWINCH or something, and then
// exit, it'll end up firing `process.emit('exit')`, so
// the handler will be fired anyway.
//
// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
// artificially, inherently leave the process in a
// state from which it is not safe to try and enter JS
// listeners.
// Portable base set: these terminate a process on every supported
// platform (including Windows).
module . exports = [
'SIGABRT' ,
'SIGALRM' ,
'SIGHUP' ,
'SIGINT' ,
'SIGTERM'
]
// Signals that exist (and are fatal by default) on POSIX platforms but
// are not deliverable on Windows.
if ( process . platform !== 'win32' ) {
module . exports . push (
'SIGVTALRM' ,
'SIGXCPU' ,
'SIGXFSZ' ,
'SIGUSR2' ,
'SIGTRAP' ,
'SIGSYS' ,
'SIGQUIT' ,
'SIGIOT'
// should detect profiler and enable/disable accordingly.
// see #21
// 'SIGPROF'
)
}
// Linux-only fatal signals.
if ( process . platform === 'linux' ) {
module . exports . push (
'SIGIO' ,
'SIGPOLL' ,
'SIGPWR' ,
'SIGSTKFLT' ,
'SIGUNUSED'
)
}
/***/ } ) ,
/***/ 728 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module . exports = isexe
isexe . sync = sync
var fs = _ _webpack _require _ _ ( 747 )
// Async check: stat the path and report (via callback) whether it is an
// executable regular file for the current (or supplied) uid/gid.
function isexe (path, options, cb) {
  fs.stat(path, function (er, stat) {
    cb(er, er ? false : checkStat(stat, options))
  })
}

// Synchronous variant of isexe().
function sync (path, options) {
  return checkStat(fs.statSync(path), options)
}

// A path is executable when it is a regular file whose mode grants an
// applicable execute bit.
function checkStat (stat, options) {
  return stat.isFile() && checkMode(stat, options)
}

// Evaluate the mode bits against the effective uid/gid: world-execute,
// group-execute with a matching gid, owner-execute with a matching uid,
// or any owner/group execute bit when running as root (uid 0).
// `options.uid`/`options.gid` override process.getuid()/getgid().
function checkMode (stat, options) {
  const mod = stat.mode
  const uid = stat.uid
  const gid = stat.gid

  const myUid = options.uid !== undefined ?
    options.uid : process.getuid && process.getuid()
  const myGid = options.gid !== undefined ?
    options.gid : process.getgid && process.getgid()

  const u = 0o100 // owner execute
  const g = 0o010 // group execute
  const o = 0o001 // world execute
  const ug = u | g

  return (mod & o) ||
    (mod & g) && gid === myGid ||
    (mod & u) && uid === myUid ||
    (mod & ug) && myUid === 0
}
/***/ } ) ,
/***/ 746 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
const cp = _ _webpack _require _ _ ( 129 ) ;
const parse = _ _webpack _require _ _ ( 855 ) ;
const enoent = _ _webpack _require _ _ ( 101 ) ;
// Spawn a child process, routing the command through parse() so that
// Windows shebangs/cmd-shims and shell escaping are handled, then hook
// ENOENT detection onto the resulting child process.
function spawn ( command , args , options ) {
// Parse the arguments
const parsed = parse ( command , args , options ) ;
// Spawn the child process
const spawned = cp . spawn ( parsed . command , parsed . args , parsed . options ) ;
// Hook into child process "exit" event to emit an error if the command
// does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
enoent . hookChildProcess ( spawned , parsed ) ;
return spawned ;
}
// Synchronous variant of spawn(); an ENOENT failure is surfaced on
// `result.error` instead of via an event.
function spawnSync ( command , args , options ) {
// Parse the arguments
const parsed = parse ( command , args , options ) ;
// Spawn the child process
const result = cp . spawnSync ( parsed . command , parsed . args , parsed . options ) ;
// Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
result . error = result . error || enoent . verifyENOENTSync ( result . status , parsed ) ;
return result ;
}
module . exports = spawn ;
module . exports . spawn = spawn ;
module . exports . sync = spawnSync ;
// Internals exposed for testing / advanced use.
module . exports . _parse = parse ;
module . exports . _enoent = enoent ;
/***/ } ) ,
/***/ 747 :
/***/ ( function ( module ) {
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 761 :
/***/ ( function ( module ) {
module . exports = require ( "zlib" ) ;
/***/ } ) ,
/***/ 762 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
var universalUserAgent = _ _webpack _require _ _ ( 429 ) ;
var beforeAfterHook = _ _webpack _require _ _ ( 682 ) ;
var request = _ _webpack _require _ _ ( 234 ) ;
var graphql = _ _webpack _require _ _ ( 668 ) ;
var authToken = _ _webpack _require _ _ ( 334 ) ;
// Babel helper: define `key` on `obj`. Uses defineProperty for keys
// already present (to force enumerable/configurable/writable) and plain
// assignment otherwise. Returns `obj`.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }

  return obj;
}

// Babel helper: own string keys plus (optionally only enumerable) own
// symbol keys of `object`.
function ownKeys(object, enumerableOnly) {
  const keys = Object.keys(object);

  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter((sym) => Object.getOwnPropertyDescriptor(object, sym).enumerable);
    }
    keys.push(...symbols);
  }

  return keys;
}

// Babel helper implementing object-spread semantics of `{ ...a, ...b }`:
// later sources win, symbol keys included. Odd argument positions copy
// values via _defineProperty; even positions copy full descriptors.
function _objectSpread2(target) {
  for (let i = 1; i < arguments.length; i++) {
    const source = arguments[i] != null ? arguments[i] : {};

    if (i % 2) {
      for (const key of ownKeys(Object(source), true)) {
        _defineProperty(target, key, source[key]);
      }
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      for (const key of ownKeys(Object(source))) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      }
    }
  }

  return target;
}
const VERSION = "2.5.3" ;
/**
 * Core Octokit client. Wires together the request layer, the GraphQL
 * layer, lifecycle hooks and an authentication strategy, and supports
 * extension via the static `defaults()` and `plugin()` methods.
 *
 * Fix: stray VCS-timestamp lines that had corrupted the constructor and
 * the `plugin()` method (they were not valid JavaScript) are removed;
 * no behavior changed.
 */
let Octokit =
/** @class */
(() => {
  class Octokit {
    constructor(options = {}) {
      const hook = new beforeAfterHook.Collection();
      const requestDefaults = {
        baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
        headers: {},
        request: Object.assign({}, options.request, {
          hook: hook.bind(null, "request")
        }),
        mediaType: {
          previews: [],
          format: ""
        }
      }; // prepend default user agent with `options.userAgent` if set

      requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");

      if (options.baseUrl) {
        requestDefaults.baseUrl = options.baseUrl;
      }

      if (options.previews) {
        requestDefaults.mediaType.previews = options.previews;
      }

      if (options.timeZone) {
        requestDefaults.headers["time-zone"] = options.timeZone;
      }

      this.request = request.request.defaults(requestDefaults);
      this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, {
        baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api")
      }));
      this.log = Object.assign({
        debug: () => {},
        info: () => {},
        warn: console.warn.bind(console),
        error: console.error.bind(console)
      }, options.log);
      this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
      // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred.
      // (2) If only `options.auth` is set, use the default token authentication strategy.
      // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
      // TODO: type `options.auth` based on `options.authStrategy`.

      if (!options.authStrategy) {
        if (!options.auth) {
          // (1)
          this.auth = async () => ({
            type: "unauthenticated"
          });
        } else {
          // (2)
          const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯

          hook.wrap("request", auth.hook);
          this.auth = auth;
        }
      } else {
        const auth = options.authStrategy(Object.assign({
          request: this.request
        }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯

        hook.wrap("request", auth.hook);
        this.auth = auth;
      } // apply plugins
      // https://stackoverflow.com/a/16345172

      const classConstructor = this.constructor;
      classConstructor.plugins.forEach(plugin => {
        Object.assign(this, plugin(this, options));
      });
    }

    // Subclass factory: returns an Octokit subclass whose constructor
    // merges `defaults` under the caller-supplied options (user agents
    // are concatenated rather than replaced).
    static defaults(defaults) {
      const OctokitWithDefaults = class extends this {
        constructor(...args) {
          const options = args[0] || {};
          super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
            userAgent: `${options.userAgent} ${defaults.userAgent}`
          } : null));
        }

      };
      return OctokitWithDefaults;
    }

    /**
     * Attach a plugin (or many) to your Octokit instance.
     *
     * @example
     * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
     */
    static plugin(p1, ...p2) {
      var _a;

      if (p1 instanceof Array) {
        console.warn(["Passing an array of plugins to Octokit.plugin() has been deprecated.", "Instead of:", " Octokit.plugin([plugin1, plugin2, ...])", "Use:", " Octokit.plugin(plugin1, plugin2, ...)"].join("\n"));
      }

      const currentPlugins = this.plugins;
      let newPlugins = [...(p1 instanceof Array ? p1 : [p1]), ...p2];
      const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
      return NewOctokit;
    }

  }

  Octokit.VERSION = VERSION;
  Octokit.plugins = [];
  return Octokit;
})();
exports . Octokit = Octokit ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 766 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
const pump = _ _webpack _require _ _ ( 341 ) ;
const bufferStream = _ _webpack _require _ _ ( 585 ) ;
/** Raised by getStream() when the buffered data exceeds `maxBuffer`. */
class MaxBufferError extends Error {
	constructor() {
		super('maxBuffer exceeded');
		this.name = 'MaxBufferError';
	}
}
// Consume `inputStream` into memory via a buffering PassThrough
// (bufferStream). Resolves with the buffered value (string, Buffer, or
// array depending on `options`); rejects with MaxBufferError once more
// than `options.maxBuffer` has been buffered, attaching the data read
// so far as `error.bufferedData`.
function getStream ( inputStream , options ) {
if ( ! inputStream ) {
return Promise . reject ( new Error ( 'Expected a stream' ) ) ;
}
options = Object . assign ( { maxBuffer : Infinity } , options ) ;
const { maxBuffer } = options ;
let stream ;
return new Promise ( ( resolve , reject ) => {
// Attach whatever was buffered before the failure so callers can
// inspect partial data.
const rejectPromise = error => {
if ( error ) { // A null check
error . bufferedData = stream . getBufferedValue ( ) ;
}
reject ( error ) ;
} ;
// pump() forwards stream errors and handles cleanup of both streams.
stream = pump ( inputStream , bufferStream ( options ) , error => {
if ( error ) {
rejectPromise ( error ) ;
return ;
}
resolve ( ) ;
} ) ;
// Enforce maxBuffer incrementally as data arrives.
stream . on ( 'data' , ( ) => {
if ( stream . getBufferedLength ( ) > maxBuffer ) {
rejectPromise ( new MaxBufferError ( ) ) ;
}
} ) ;
} ) . then ( ( ) => stream . getBufferedValue ( ) ) ;
}
module . exports = getStream ;
module . exports . buffer = ( stream , options ) => getStream ( stream , Object . assign ( { } , options , { encoding : 'buffer' } ) ) ;
module . exports . array = ( stream , options ) => getStream ( stream , Object . assign ( { } , options , { array : true } ) ) ;
module . exports . MaxBufferError = MaxBufferError ;
/***/ } ) ,
/***/ 774 :
/***/ ( function ( module ) {
"use strict" ;
module . exports = function ( x ) {
var lf = typeof x === 'string' ? '\n' : '\n' . charCodeAt ( ) ;
var cr = typeof x === 'string' ? '\r' : '\r' . charCodeAt ( ) ;
if ( x [ x . length - 1 ] === lf ) {
x = x . slice ( 0 , x . length - 1 ) ;
}
if ( x [ x . length - 1 ] === cr ) {
x = x . slice ( 0 , x . length - 1 ) ;
}
return x ;
} ;
/***/ } ) ,
/***/ 819 :
/***/ ( function ( module ) {
module . exports = removeHook
// Unregister a previously-added hook: find the registry entry whose
// original (unwrapped) function is `method` and drop it. Unknown names
// or methods are silently ignored.
function removeHook (state, name, method) {
  const registered = state.registry[name]
  if (!registered) {
    return
  }

  const index = registered
    .map(function (item) { return item.orig })
    .indexOf(method)

  if (index === -1) {
    return
  }

  registered.splice(index, 1)
}
/***/ } ) ,
/***/ 824 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
const os = _ _webpack _require _ _ ( 87 ) ;
const macosRelease = _ _webpack _require _ _ ( 493 ) ;
const winRelease = _ _webpack _require _ _ ( 515 ) ;
/**
 * Human-readable OS name for a platform/release pair, e.g.
 * "macOS Catalina", "Linux 4.15", "Windows 10". With no arguments the
 * current machine is described; unknown platforms are returned as-is.
 *
 * @throws {Error} When `release` is given without `platform`.
 */
const osName = (platform, release) => {
	if (!platform && release) {
		throw new Error('You can\'t specify a `release` without specifying `platform`');
	}

	platform = platform || os.platform();

	if (platform === 'darwin') {
		if (!release && os.platform() === 'darwin') {
			release = os.release();
		}

		// Darwin 16+ ("Sierra" era) rebranded "OS X" as "macOS".
		const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS';
		const id = release ? macosRelease(release).name : '';
		return prefix + (id ? ' ' + id : '');
	}

	if (platform === 'linux') {
		if (!release && os.platform() === 'linux') {
			release = os.release();
		}

		// Keep only the "major.minor" part of the kernel release.
		const id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : '';
		return 'Linux' + (id ? ' ' + id : '');
	}

	if (platform === 'win32') {
		if (!release && os.platform() === 'win32') {
			release = os.release();
		}

		const id = release ? winRelease(release) : '';
		return 'Windows' + (id ? ' ' + id : '');
	}

	return platform;
};
module . exports = osName ;
/***/ } ) ,
/***/ 835 :
/***/ ( function ( module ) {
module . exports = require ( "url" ) ;
/***/ } ) ,
/***/ 840 :
/***/ ( function ( module ) {
"use strict" ;
/ * !
* isobject < https : //github.com/jonschlinkert/isobject>
*
* Copyright ( c ) 2014 - 2017 , Jon Schlinkert .
* Released under the MIT License .
* /
// True for any non-null object value that is not an array.
function isObject(val) {
  return val != null && typeof val === 'object' && Array.isArray(val) === false;
}

/*!
 * is-plain-object <https://github.com/jonschlinkert/is-plain-object>
 *
 * Copyright (c) 2014-2017, Jon Schlinkert.
 * Released under the MIT License.
 */

// True when `o` stringifies as a plain "[object Object]".
function isObjectObject(o) {
  return isObject(o) === true
    && Object.prototype.toString.call(o) === '[object Object]';
}

/**
 * Is `o` a plain object created by `{}` or `new Object()`?
 * Rejects arrays, class instances, `Object.create(null)` results, and
 * objects with tampered constructors or prototypes.
 */
function isPlainObject(o) {
  if (isObjectObject(o) === false) {
    return false;
  }

  // Reject objects whose constructor was replaced with a non-function
  // (this also catches the missing constructor of Object.create(null)).
  const ctor = o.constructor;
  if (typeof ctor !== 'function') {
    return false;
  }

  // Reject constructors whose prototype is not itself object-like.
  const prot = ctor.prototype;
  if (isObjectObject(prot) === false) {
    return false;
  }

  // A genuine Object constructor's prototype carries isPrototypeOf.
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }

  // Most likely a plain Object
  return true;
}
module . exports = isPlainObject ;
/***/ } ) ,
/***/ 855 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// cross-spawn's `parse`: normalizes a (command, args, options) triple so
// child_process.spawn behaves consistently across platforms, with special
// handling for Windows (cmd.exe routing, shebangs, argument escaping).
"use strict" ;
const path = _ _webpack _require _ _ ( 622 ) ;
const niceTry = _ _webpack _require _ _ ( 560 ) ;
const resolveCommand = _ _webpack _require _ _ ( 274 ) ;
const escape = _ _webpack _require _ _ ( 48 ) ;
const readShebang = _ _webpack _require _ _ ( 252 ) ;
const semver = _ _webpack _require _ _ ( 911 ) ;
const isWin = process . platform === 'win32' ;
// File types Windows can execute directly, without going through a shell.
const isExecutableRegExp = /\.(?:com|exe)$/i ;
// cmd-shims generated by npm inside node_modules/.bin.
const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i ;
// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0
const supportsShellOption = niceTry ( ( ) => semver . satisfies ( process . version , '^4.8.0 || ^5.7.0 || >= 6.0.0' , true ) ) || false ;
/**
 * Resolve `parsed.command` to an actual file and, when that file begins
 * with a shebang, rewrite the invocation so the shebang interpreter
 * becomes the command and the script becomes its first argument.
 * Returns the resolved command file (may be undefined).
 */
function detectShebang(parsed) {
  parsed.file = resolveCommand(parsed);

  const shebang = parsed.file && readShebang(parsed.file);

  if (shebang) {
    parsed.args.unshift(parsed.file);
    parsed.command = shebang;

    return resolveCommand(parsed);
  }

  return parsed.file;
}
/**
 * Prepare a command for spawning WITHOUT the `shell` option.
 * No-op everywhere except Windows, where anything that is not a .com/.exe
 * binary (batch files, shebang scripts, npm cmd-shims) must be routed
 * through cmd.exe with every argument escaped.
 */
function parseNonShell(parsed) {
  if (!isWin) {
    return parsed;
  }

  // Detect & add support for shebangs
  const commandFile = detectShebang(parsed);

  // We don't need a shell if the command filename is an executable
  const needsShell = !isExecutableRegExp.test(commandFile);

  // If a shell is required, use cmd.exe and take care of escaping everything correctly
  // Note that `forceShell` is an hidden option used only in tests
  if (parsed.options.forceShell || needsShell) {
    // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
    // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument
    // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
    // we need to double escape them
    const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);

    // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
    // This is necessary otherwise it will always fail with ENOENT in those cases
    parsed.command = path.normalize(parsed.command);

    // Escape command & arguments
    parsed.command = escape.command(parsed.command);
    parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));

    const shellCommand = [parsed.command].concat(parsed.args).join(' ');

    // /d skips AutoRun commands, /s preserves quoting, /c runs then exits.
    parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
    parsed.command = process.env.comspec || 'cmd.exe';
    parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
  }

  return parsed;
}
/**
 * Prepare a command for spawning WITH the `shell` option on Node versions
 * that lack native support, reproducing node's own behavior: wrap the full
 * command line in cmd.exe (Windows) or sh -c (POSIX / Android).
 */
function parseShell(parsed) {
  // If node supports the shell option, there's no need to mimic its behavior
  if (supportsShellOption) {
    return parsed;
  }

  // Mimic node shell option
  // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
  const shellCommand = [parsed.command].concat(parsed.args).join(' ');

  if (isWin) {
    parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
    parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
    parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
  } else {
    if (typeof parsed.options.shell === 'string') {
      parsed.command = parsed.options.shell;
    } else if (process.platform === 'android') {
      parsed.command = '/system/bin/sh';
    } else {
      parsed.command = '/bin/sh';
    }

    parsed.args = ['-c', shellCommand];
  }

  return parsed;
}
function parse ( command , args , options ) {
// Normalize arguments, similar to nodejs
if ( args && ! Array . isArray ( args ) ) {
options = args ;
args = null ;
}
args = args ? args . slice ( 0 ) : [ ] ; // Clone array to avoid changing the original
options = Object . assign ( { } , options ) ; // Clone object to avoid changing the original
// Build our parsed object
const parsed = {
command ,
args ,
options ,
file : undefined ,
original : {
command ,
args ,
} ,
} ;
// Delegate further parsing to shell or non-shell
return options . shell ? parseShell ( parsed ) : parseNonShell ( parsed ) ;
}
module . exports = parse ;
/***/ } ) ,
/***/ 877 :
/***/ ( function ( module ) {
// Optional dependency: `eval("require")` hides the call from webpack's
// static analysis so "encoding" stays external and is resolved (or throws)
// at runtime rather than being bundled.
module . exports = eval ( "require" ) ( "encoding" ) ;
/***/ } ) ,
/***/ 911 :
/***/ ( function ( module , exports ) {
// Vendored node-semver (5.x line): SemVer class, comparators, ranges.
exports = module . exports = SemVer
// debug() logs to the console only when NODE_DEBUG contains "semver";
// otherwise it is a no-op.
var debug
/* istanbul ignore next */
if ( typeof process === 'object' &&
process . env &&
process . env . NODE _DEBUG &&
/\bsemver\b/i . test ( process . env . NODE _DEBUG ) ) {
debug = function ( ) {
var args = Array . prototype . slice . call ( arguments , 0 )
args . unshift ( 'SEMVER' )
console . log . apply ( console , args )
}
} else {
debug = function ( ) { }
}
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'

// Versions longer than this are rejected outright instead of parsed.
var MAX_LENGTH = 256
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
  /* istanbul ignore next */ 9007199254740991

// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

// The actual regexps go on exports.re
var re = exports.re = []
var src = exports.src = []
var R = 0

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.

var NUMERICIDENTIFIER = R++
src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
var NUMERICIDENTIFIERLOOSE = R++
src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

var NONNUMERICIDENTIFIER = R++
src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.

var MAINVERSION = R++
src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
                   '(' + src[NUMERICIDENTIFIER] + ')\\.' +
                   '(' + src[NUMERICIDENTIFIER] + ')'

var MAINVERSIONLOOSE = R++
src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
                        '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
                        '(' + src[NUMERICIDENTIFIERLOOSE] + ')'

// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.

var PRERELEASEIDENTIFIER = R++
src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
                            '|' + src[NONNUMERICIDENTIFIER] + ')'

var PRERELEASEIDENTIFIERLOOSE = R++
src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
                                 '|' + src[NONNUMERICIDENTIFIER] + ')'

// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.

var PRERELEASE = R++
src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
                  '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'

var PRERELEASELOOSE = R++
src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
                       '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'

// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.

var BUILDIDENTIFIER = R++
src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.

var BUILD = R++
src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
             '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'

// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.

// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.

var FULL = R++
var FULLPLAIN = 'v?' + src[MAINVERSION] +
                src[PRERELEASE] + '?' +
                src[BUILD] + '?'

src[FULL] = '^' + FULLPLAIN + '$'

// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
                 src[PRERELEASELOOSE] + '?' +
                 src[BUILD] + '?'

var LOOSE = R++
src[LOOSE] = '^' + LOOSEPLAIN + '$'

var GTLT = R++
src[GTLT] = '((?:<|>)?=?)'

// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
var XRANGEIDENTIFIERLOOSE = R++
src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
var XRANGEIDENTIFIER = R++
src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'

var XRANGEPLAIN = R++
src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:' + src[PRERELEASE] + ')?' +
                   src[BUILD] + '?' +
                   ')?)?'

var XRANGEPLAINLOOSE = R++
src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:' + src[PRERELEASELOOSE] + ')?' +
                        src[BUILD] + '?' +
                        ')?)?'

var XRANGE = R++
src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
var XRANGELOOSE = R++
src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'

// Coercion.
// Extract anything that could conceivably be a part of a valid semver
var COERCE = R++
src[COERCE] = '(?:^|[^\\d])' +
              '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
              '(?:$|[^\\d])'

// Tilde ranges.
// Meaning is "reasonably at or greater than"
var LONETILDE = R++
src[LONETILDE] = '(?:~>?)'

var TILDETRIM = R++
src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
var tildeTrimReplace = '$1~'

var TILDE = R++
src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
var TILDELOOSE = R++
src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'

// Caret ranges.
// Meaning is "at least and backwards compatible with"
var LONECARET = R++
src[LONECARET] = '(?:\\^)'

var CARETTRIM = R++
src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
var caretTrimReplace = '$1^'

var CARET = R++
src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
var CARETLOOSE = R++
src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'

// A simple gt/lt/eq thing, or just "" to indicate "any version"
var COMPARATORLOOSE = R++
src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
var COMPARATOR = R++
src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'

// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
var COMPARATORTRIM = R++
src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
                      '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'

// this one has to use the /g flag
re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
var HYPHENRANGE = R++
src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
                   '\\s+-\\s+' +
                   '(' + src[XRANGEPLAIN] + ')' +
                   '\\s*$'

var HYPHENRANGELOOSE = R++
src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
                        '\\s+-\\s+' +
                        '(' + src[XRANGEPLAINLOOSE] + ')' +
                        '\\s*$'

// Star ranges basically just allow anything at all.
var STAR = R++
src[STAR] = '(<|>)?=?\\s*\\*'

// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for (var i = 0; i < R; i++) {
  debug(i, src[i])
  if (!re[i]) {
    re[i] = new RegExp(src[i])
  }
}
exports.parse = parse

/**
 * Parse `version` into a SemVer instance, or return null when it is not a
 * valid semver (wrong type, longer than MAX_LENGTH, or failing the grammar).
 * `options` may be an options object or a legacy boolean "loose" flag.
 */
function parse(version, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  if (version.length > MAX_LENGTH) {
    return null
  }

  var r = options.loose ? re[LOOSE] : re[FULL]
  if (!r.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}
exports.valid = valid
// Canonicalized version string, or null when `version` is invalid.
function valid(version, options) {
  var v = parse(version, options)
  return v ? v.version : null
}

exports.clean = clean
// Like valid(), but first strips surrounding whitespace and any leading
// '=' / 'v' characters.
function clean(version, options) {
  var s = parse(version.trim().replace(/^[=v]+/, ''), options)
  return s ? s.version : null
}
exports.SemVer = SemVer

/**
 * Parsed semantic version. Accepts a version string (or another SemVer)
 * plus an options object / legacy loose boolean; callable without `new`.
 * Throws TypeError on invalid input.
 */
function SemVer(version, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  if (version instanceof SemVer) {
    if (version.loose === options.loose) {
      return version
    } else {
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      return id
    })
  }

  this.build = m[5] ? m[5].split('.') : []
  this.format()
}
// Canonical string form: MAJOR.MINOR.PATCH plus an optional -prerelease
// suffix. Build metadata is intentionally excluded.
SemVer.prototype.format = function () {
  this.version = this.major + '.' + this.minor + '.' + this.patch
  if (this.prerelease.length) {
    this.version += '-' + this.prerelease.join('.')
  }
  return this.version
}

SemVer.prototype.toString = function () {
  return this.version
}
// Full three-way comparison: main version first, then prerelease.
SemVer.prototype.compare = function (other) {
  debug('SemVer.compare', this.version, this.options, other)
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return this.compareMain(other) || this.comparePre(other)
}

// Compare only major.minor.patch.
SemVer.prototype.compareMain = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  return compareIdentifiers(this.major, other.major) ||
         compareIdentifiers(this.minor, other.minor) ||
         compareIdentifiers(this.patch, other.patch)
}

// Compare only the prerelease identifier lists, element by element.
SemVer.prototype.comparePre = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  // NOT having a prerelease is > having one
  if (this.prerelease.length && !other.prerelease.length) {
    return -1
  } else if (!this.prerelease.length && other.prerelease.length) {
    return 1
  } else if (!this.prerelease.length && !other.prerelease.length) {
    return 0
  }

  var i = 0
  do {
    var a = this.prerelease[i]
    var b = other.prerelease[i]
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      return 0
    } else if (b === undefined) {
      return 1
    } else if (a === undefined) {
      return -1
    } else if (a === b) {
      continue
    } else {
      return compareIdentifiers(a, b)
    }
  } while (++i)
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
/**
 * Increment this version in place according to `release` ('major', 'minor',
 * 'patch', 'premajor', 'preminor', 'prepatch', 'prerelease' or 'pre'),
 * optionally tagging prereleases with `identifier`. Returns `this`.
 * Throws Error on an unknown release type.
 */
SemVer.prototype.inc = function (release, identifier) {
  switch (release) {
    case 'premajor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor = 0
      this.major++
      this.inc('pre', identifier)
      break
    case 'preminor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor++
      this.inc('pre', identifier)
      break
    case 'prepatch':
      // If this is already a prerelease, it will bump to the next version
      // drop any prereleases that might already exist, since they are not
      // relevant at this point.
      this.prerelease.length = 0
      this.inc('patch', identifier)
      this.inc('pre', identifier)
      break
    // If the input is a non-prerelease version, this acts the same as
    // prepatch.
    case 'prerelease':
      if (this.prerelease.length === 0) {
        this.inc('patch', identifier)
      }
      this.inc('pre', identifier)
      break

    case 'major':
      // If this is a pre-major version, bump up to the same major version.
      // Otherwise increment major.
      // 1.0.0-5 bumps to 1.0.0
      // 1.1.0 bumps to 2.0.0
      if (this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0) {
        this.major++
      }
      this.minor = 0
      this.patch = 0
      this.prerelease = []
      break
    case 'minor':
      // If this is a pre-minor version, bump up to the same minor version.
      // Otherwise increment minor.
      // 1.2.0-5 bumps to 1.2.0
      // 1.2.1 bumps to 1.3.0
      if (this.patch !== 0 || this.prerelease.length === 0) {
        this.minor++
      }
      this.patch = 0
      this.prerelease = []
      break
    case 'patch':
      // If this is not a pre-release version, it will increment the patch.
      // If it is a pre-release it will bump up to the same patch version.
      // 1.2.0-5 patches to 1.2.0
      // 1.2.0 patches to 1.2.1
      if (this.prerelease.length === 0) {
        this.patch++
      }
      this.prerelease = []
      break
    // This probably shouldn't be used publicly.
    // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
    case 'pre':
      if (this.prerelease.length === 0) {
        this.prerelease = [0]
      } else {
        var i = this.prerelease.length
        while (--i >= 0) {
          if (typeof this.prerelease[i] === 'number') {
            this.prerelease[i]++
            i = -2 // flag: a numeric identifier was bumped
          }
        }
        if (i === -1) {
          // didn't increment anything
          this.prerelease.push(0)
        }
      }
      if (identifier) {
        // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
        // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
        if (this.prerelease[0] === identifier) {
          if (isNaN(this.prerelease[1])) {
            this.prerelease = [identifier, 0]
          }
        } else {
          this.prerelease = [identifier, 0]
        }
      }
      break

    default:
      throw new Error('invalid increment argument: ' + release)
  }
  this.format()
  this.raw = this.version
  return this
}
exports.inc = inc

/**
 * Convenience wrapper: increment `version` by `release` and return the new
 * version string, or null if `version` is invalid. The third argument may
 * be the prerelease identifier (legacy signature) or the loose flag.
 */
function inc(version, release, loose, identifier) {
  if (typeof (loose) === 'string') {
    identifier = loose
    loose = undefined
  }

  try {
    return new SemVer(version, loose).inc(release, identifier).version
  } catch (er) {
    return null
  }
}
exports.diff = diff

/**
 * Name the most significant component in which two versions differ:
 * 'major' | 'minor' | 'patch', prefixed with 'pre' when either side has a
 * prerelease tag; 'prerelease' when only the prerelease part differs;
 * null when the versions are equal.
 */
function diff(version1, version2) {
  if (eq(version1, version2)) {
    return null
  } else {
    var v1 = parse(version1)
    var v2 = parse(version2)
    var prefix = ''
    if (v1.prerelease.length || v2.prerelease.length) {
      prefix = 'pre'
      var defaultResult = 'prerelease'
    }
    for (var key in v1) {
      if (key === 'major' || key === 'minor' || key === 'patch') {
        if (v1[key] !== v2[key]) {
          return prefix + key
        }
      }
    }
    // `defaultResult` is var-hoisted out of the if above: it is only set
    // when a prerelease was seen, so this is undefined otherwise.
    return defaultResult // may be undefined
  }
}

exports.compareIdentifiers = compareIdentifiers
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
// Identifiers made solely of digits are compared numerically.
var numeric = /^[0-9]+$/

/**
 * Compare two prerelease identifiers per the SemVer spec: numeric
 * identifiers sort numerically and always lower than alphanumeric ones;
 * alphanumeric identifiers sort lexically. Returns -1, 0 or 1.
 */
function compareIdentifiers(a, b) {
  var anum = numeric.test(a)
  var bnum = numeric.test(b)

  if (anum && bnum) {
    a = +a
    b = +b
  }

  return a === b ? 0
    : (anum && !bnum) ? -1
    : (bnum && !anum) ? 1
    : a < b ? -1
    : 1
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
exports.rcompareIdentifiers = rcompareIdentifiers
// Reversed identifier comparison, for descending sorts.
function rcompareIdentifiers(a, b) {
  return compareIdentifiers(b, a)
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
exports.major = major
// Major component of a version.
function major(a, loose) {
  return new SemVer(a, loose).major
}

exports.minor = minor
// Minor component of a version.
function minor(a, loose) {
  return new SemVer(a, loose).minor
}

exports.patch = patch
// Patch component of a version.
function patch(a, loose) {
  return new SemVer(a, loose).patch
}

exports.compare = compare
// Standard three-way comparison: -1, 0 or 1.
function compare(a, b, loose) {
  return new SemVer(a, loose).compare(new SemVer(b, loose))
}

exports.compareLoose = compareLoose
function compareLoose(a, b) {
  return compare(a, b, true)
}

exports.rcompare = rcompare
// Reverse comparison, for descending sorts.
function rcompare(a, b, loose) {
  return compare(b, a, loose)
}

exports.sort = sort
// Sort a list of versions ascending (in place, as Array#sort does).
function sort(list, loose) {
  return list.sort(function (a, b) {
    return exports.compare(a, b, loose)
  })
}

exports.rsort = rsort
// Sort a list of versions descending (in place).
function rsort(list, loose) {
  return list.sort(function (a, b) {
    return exports.rcompare(a, b, loose)
  })
}

exports.gt = gt
function gt(a, b, loose) {
  return compare(a, b, loose) > 0
}

exports.lt = lt
function lt(a, b, loose) {
  return compare(a, b, loose) < 0
}

exports.eq = eq
function eq(a, b, loose) {
  return compare(a, b, loose) === 0
}

exports.neq = neq
function neq(a, b, loose) {
  return compare(a, b, loose) !== 0
}

exports.gte = gte
function gte(a, b, loose) {
  return compare(a, b, loose) >= 0
}

exports.lte = lte
function lte(a, b, loose) {
  return compare(a, b, loose) <= 0
}
2020-06-06 12:12:17 +09:00
2020-08-22 14:57:57 +09:00
exports.cmp = cmp

/**
 * Generic comparison dispatcher: apply operator `op` ('===', '!==', '',
 * '=', '==', '!=', '>', '>=', '<', '<=') to `a` and `b`.
 * '===' / '!==' compare raw version strings without semver semantics.
 * Throws TypeError on an unknown operator.
 */
function cmp(a, op, b, loose) {
  switch (op) {
    case '===':
      if (typeof a === 'object')
        a = a.version
      if (typeof b === 'object')
        b = b.version
      return a === b

    case '!==':
      if (typeof a === 'object')
        a = a.version
      if (typeof b === 'object')
        b = b.version
      return a !== b

    case '':
    case '=':
    case '==':
      return eq(a, b, loose)

    case '!=':
      return neq(a, b, loose)

    case '>':
      return gt(a, b, loose)

    case '>=':
      return gte(a, b, loose)

    case '<':
      return lt(a, b, loose)

    case '<=':
      return lte(a, b, loose)

    default:
      throw new TypeError('Invalid operator: ' + op)
  }
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
exports.Comparator = Comparator

/**
 * A single comparator such as `>=1.2.3`. Callable without `new`.
 * After construction, `this.semver` is the ANY sentinel for the empty
 * comparator (which matches every version) and `this.value` is its
 * canonical string form.
 */
function Comparator(comp, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (comp instanceof Comparator) {
    if (comp.loose === !!options.loose) {
      return comp
    } else {
      comp = comp.value
    }
  }

  if (!(this instanceof Comparator)) {
    return new Comparator(comp, options)
  }

  debug('comparator', comp, options)
  this.options = options
  this.loose = !!options.loose
  this.parse(comp)

  if (this.semver === ANY) {
    this.value = ''
  } else {
    this.value = this.operator + this.semver.version
  }

  debug('comp', this)
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
// Sentinel meaning "any version" (the empty comparator).
var ANY = {}

/**
 * Split a comparator string into `this.operator` and `this.semver`.
 * A bare '=' operator is normalized to ''. Throws TypeError when the
 * string does not match the comparator grammar.
 */
Comparator.prototype.parse = function (comp) {
  var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var m = comp.match(r)

  if (!m) {
    throw new TypeError('Invalid comparator: ' + comp)
  }

  this.operator = m[1]
  if (this.operator === '=') {
    this.operator = ''
  }

  // if it literally is just '>' or '' then allow anything.
  if (!m[2]) {
    this.semver = ANY
  } else {
    this.semver = new SemVer(m[2], this.options.loose)
  }
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
Comparator.prototype.toString = function () {
  return this.value
}

// Whether `version` (string or SemVer) satisfies this single comparator.
Comparator.prototype.test = function (version) {
  debug('Comparator.test', version, this.options.loose)

  if (this.semver === ANY) {
    return true
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  return cmp(version, this.operator, this.semver, this.options)
}
2020-08-22 14:57:57 +09:00
/**
 * Whether the version sets matched by this comparator and `comp` overlap.
 * Requires `comp` to be a Comparator; `options` as elsewhere in this
 * module (object or legacy loose boolean).
 */
Comparator.prototype.intersects = function (comp, options) {
  if (!(comp instanceof Comparator)) {
    throw new TypeError('a Comparator is required')
  }

  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  var rangeTmp

  // An empty operator means "any version": reduce to a satisfies() check
  // against the other side treated as a range.
  if (this.operator === '') {
    rangeTmp = new Range(comp.value, options)
    return satisfies(this.value, rangeTmp, options)
  } else if (comp.operator === '') {
    rangeTmp = new Range(this.value, options)
    return satisfies(comp.semver, rangeTmp, options)
  }

  var sameDirectionIncreasing =
    (this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '>=' || comp.operator === '>')
  var sameDirectionDecreasing =
    (this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '<=' || comp.operator === '<')
  var sameSemVer = this.semver.version === comp.semver.version
  var differentDirectionsInclusive =
    (this.operator === '>=' || this.operator === '<=') &&
    (comp.operator === '>=' || comp.operator === '<=')
  var oppositeDirectionsLessThan =
    cmp(this.semver, '<', comp.semver, options) &&
    ((this.operator === '>=' || this.operator === '>') &&
     (comp.operator === '<=' || comp.operator === '<'))
  var oppositeDirectionsGreaterThan =
    cmp(this.semver, '>', comp.semver, options) &&
    ((this.operator === '<=' || this.operator === '<') &&
     (comp.operator === '>=' || comp.operator === '>'))

  return sameDirectionIncreasing || sameDirectionDecreasing ||
    (sameSemVer && differentDirectionsInclusive) ||
    oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
2020-08-22 14:57:57 +09:00
exports.Range = Range

/**
 * A parsed semver range: `||`-separated alternatives, each an array of
 * Comparators that must all match. Callable without `new`.
 * Throws TypeError when no alternative yields a usable comparator set.
 */
function Range(range, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (range instanceof Range) {
    if (range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease) {
      return range
    } else {
      return new Range(range.raw, options)
    }
  }

  if (range instanceof Comparator) {
    return new Range(range.value, options)
  }

  if (!(this instanceof Range)) {
    return new Range(range, options)
  }

  this.options = options
  this.loose = !!options.loose
  this.includePrerelease = !!options.includePrerelease

  // First, split based on boolean or ||
  this.raw = range
  this.set = range.split(/\s*\|\|\s*/).map(function (range) {
    return this.parseRange(range.trim())
  }, this).filter(function (c) {
    // throw out any that are not relevant for whatever reason
    return c.length
  })

  if (!this.set.length) {
    throw new TypeError('Invalid SemVer Range: ' + range)
  }

  this.format()
}
2020-03-21 11:46:39 +09:00
2020-08-22 14:57:57 +09:00
// Rebuild `this.range`, the canonical string form: comparators joined by
// spaces, alternatives joined by '||'.
Range.prototype.format = function () {
  this.range = this.set.map(function (comps) {
    return comps.join(' ').trim()
  }).join('||').trim()
  return this.range
}

Range.prototype.toString = function () {
  return this.range
}
2020-03-21 11:46:39 +09:00
2020-08-22 14:57:57 +09:00
/**
 * Parse one `||`-free range alternative into an array of Comparators,
 * desugaring hyphen ranges, whitespace, tildes, carets and x-ranges first.
 */
Range.prototype.parseRange = function (range) {
  var loose = this.options.loose
  range = range.trim()

  // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
  var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
  range = range.replace(hr, hyphenReplace)
  debug('hyphen replace', range)

  // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
  range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
  debug('comparator trim', range, re[COMPARATORTRIM])

  // `~ 1.2.3` => `~1.2.3`
  range = range.replace(re[TILDETRIM], tildeTrimReplace)

  // `^ 1.2.3` => `^1.2.3`
  range = range.replace(re[CARETTRIM], caretTrimReplace)

  // normalize spaces
  range = range.split(/\s+/).join(' ')

  // At this point, the range is completely trimmed and
  // ready to be split into comparators.

  var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var set = range.split(' ').map(function (comp) {
    return parseComparator(comp, this.options)
  }, this).join(' ').split(/\s+/)
  if (this.options.loose) {
    // in loose mode, throw out any that are not valid comparators
    set = set.filter(function (comp) {
      return !!comp.match(compRe)
    })
  }
  set = set.map(function (comp) {
    return new Comparator(comp, this.options)
  }, this)

  return set
}
// Two ranges intersect when some clause of each can be satisfied by the
// same version: every comparator of one clause must intersect every
// comparator of some clause of the other range.
Range.prototype.intersects = function (range, options) {
  if (!(range instanceof Range)) {
    throw new TypeError('a Range is required')
  }

  return this.set.some(function (ourComparators) {
    return ourComparators.every(function (ourComp) {
      return range.set.some(function (theirComparators) {
        return theirComparators.every(function (theirComp) {
          return ourComp.intersects(theirComp, options)
        })
      })
    })
  })
}
// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators
function toComparators (range, options) {
  var parsed = new Range(range, options)
  // Render each comparator set back to its string pieces.
  return parsed.set.map(function (comparators) {
    var rendered = comparators.map(function (c) {
      return c.value
    })
    return rendered.join(' ').trim().split(' ')
  })
}
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  // Desugar in a fixed order: carets, then tildes, x-ranges, and
  // finally bare stars. Order matters: each pass feeds the next.
  var passes = [
    ['caret', replaceCarets],
    ['tildes', replaceTildes],
    ['xrange', replaceXRanges],
    ['stars', replaceStars]
  ]
  for (var i = 0; i < passes.length; i++) {
    comp = passes[i][1](comp, options)
    debug(passes[i][0], comp)
  }
  return comp
}
// True when a version component is a wildcard: missing/empty, 'x'/'X',
// or '*'.
function isX (id) {
  if (!id) {
    return true
  }
  return id.toLowerCase() === 'x' || id === '*'
}
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
function replaceTildes (comp, options) {
  // A comparator string may hold several space-separated pieces;
  // desugar each piece independently.
  var pieces = comp.trim().split(/\s+/)
  return pieces.map(function (piece) {
    return replaceTilde(piece, options)
  }).join(' ')
}
// Desugar a single tilde comparator into a >=/< pair.
function replaceTilde (comp, options) {
  var pattern = options.loose ? re[TILDELOOSE] : re[TILDE]
  return comp.replace(pattern, function (_, M, m, p, pr) {
    debug('tilde', comp, _, M, m, p, pr)
    var ret
    if (isX(M)) {
      // ~* matches anything
      ret = ''
    } else if (isX(m)) {
      // ~2 == >=2.0.0 <3.0.0
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else {
      var nextMinor = +m + 1
      if (isX(p)) {
        // ~1.2 == >=1.2.0 <1.3.0
        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + nextMinor + '.0'
      } else if (pr) {
        debug('replaceTilde pr', pr)
        ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + M + '.' + nextMinor + '.0'
      } else {
        // ~1.2.3 == >=1.2.3 <1.3.0
        ret = '>=' + M + '.' + m + '.' + p + ' <' + M + '.' + nextMinor + '.0'
      }
    }
    debug('tilde return', ret)
    return ret
  })
}
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets (comp, options) {
  // Desugar each space-separated piece on its own.
  var pieces = comp.trim().split(/\s+/)
  return pieces.map(function (piece) {
    return replaceCaret(piece, options)
  }).join(' ')
}
// Desugar a single caret comparator. The upper bound depends only on
// the leftmost non-zero version component.
function replaceCaret (comp, options) {
  debug('caret', comp, options)
  var pattern = options.loose ? re[CARETLOOSE] : re[CARET]
  return comp.replace(pattern, function (_, M, m, p, pr) {
    debug('caret', comp, _, M, m, p, pr)
    var ret
    if (isX(M)) {
      ret = ''
    } else if (isX(m)) {
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      // ^0.1 => >=0.1.0 <0.2.0 ; ^1.2 => >=1.2.0 <2.0.0
      ret = M === '0'
        ? '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
        : '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
    } else {
      if (pr) {
        debug('replaceCaret pr', pr)
      } else {
        debug('no pr')
      }
      var lower = '>=' + M + '.' + m + '.' + p + (pr ? '-' + pr : '')
      var upper
      if (M !== '0') {
        upper = (+M + 1) + '.0.0'
      } else if (m !== '0') {
        upper = M + '.' + (+m + 1) + '.0'
      } else {
        upper = M + '.' + m + '.' + (+p + 1)
      }
      ret = lower + ' <' + upper
    }
    debug('caret return', ret)
    return ret
  })
}
// Desugar every space-separated x-range piece of a comparator string.
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  var pieces = comp.split(/\s+/)
  return pieces.map(function (piece) {
    return replaceXRange(piece, options)
  }).join(' ')
}
// Desugar a single x-range comparator (e.g. `>1.x`, `<=0.7.x`, `1.2.x`)
// into plain comparators with concrete versions.
function replaceXRange (comp, options) {
  comp = comp.trim()
  var pattern = options.loose ? re[XRANGELOOSE] : re[XRANGE]
  return comp.replace(pattern, function (ret, gtlt, M, m, p, pr) {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    var xM = isX(M)
    var xm = xM || isX(m)
    var xp = xm || isX(p)
    var anyX = xp

    // `=1.x` behaves exactly like `1.x`, so drop the operator.
    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    if (xM) {
      // The whole version is a wildcard.
      ret = (gtlt === '>' || gtlt === '<') ? '<0.0.0' : '*'
    } else if (gtlt && anyX) {
      // Partial version with an operator: replace the x parts with
      // zeroes, then tighten the operator so it means the same thing.
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        // >1.2.3 => >= 1.2.4
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      ret = gtlt + M + '.' + m + '.' + p
    } else if (xm) {
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (xp) {
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    }

    debug('xRange return', ret)
    return ret
  })
}
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars (comp, options) {
  debug('replaceStars', comp, options)
  // Looseness is ignored here: a star is always as loose as it gets.
  var trimmed = comp.trim()
  return trimmed.replace(re[STAR], '')
}
// This function is passed to string.replace(re[HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: missing components are filled with zeroes.
  if (isX(fM)) {
    from = ''
  } else if (isX(fm)) {
    from = '>=' + fM + '.0.0'
  } else if (isX(fp)) {
    from = '>=' + fM + '.' + fm + '.0'
  } else {
    from = '>=' + from
  }

  // Upper bound: a partial version means "anything in that series",
  // so bump the next component and use an exclusive bound.
  if (isX(tM)) {
    to = ''
  } else if (isX(tm)) {
    to = '<' + (+tM + 1) + '.0.0'
  } else if (isX(tp)) {
    to = '<' + tM + '.' + (+tm + 1) + '.0'
  } else if (tpr) {
    to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
  } else {
    to = '<=' + to
  }

  return (from + ' ' + to).trim()
}
// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  var options = this.options
  return this.set.some(function (comparators) {
    return testSet(comparators, version, options)
  })
}
// True when `version` satisfies every comparator in `set`, with the
// extra semver rule that prerelease versions only match ranges that
// explicitly mention a prerelease of the same [major, minor, patch].
function testSet (set, version, options) {
  for (var i = 0; i < set.length; i++) {
    if (!set[i].test(version)) {
      return false
    }
  }

  if (version.prerelease.length && !options.includePrerelease) {
    // Find the set of versions that are allowed to have prereleases.
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0, which
    // should allow `1.2.3-pr.2` to pass, but `1.2.4-alpha.notready`
    // should NOT, even though it's within the range set by the
    // comparators.
    var allowedByComparator = set.some(function (comparator) {
      debug(comparator.semver)
      if (comparator.semver === ANY) {
        return false
      }
      if (comparator.semver.prerelease.length === 0) {
        return false
      }
      var allowed = comparator.semver
      return allowed.major === version.major &&
        allowed.minor === version.minor &&
        allowed.patch === version.patch
    })
    // Version has a -pre, but it's not one of the ones we like.
    return allowedByComparator
  }

  return true
}
exports.satisfies = satisfies
// True when `version` is inside `range`; an invalid range matches
// nothing rather than throwing.
function satisfies (version, range, options) {
  var parsed
  try {
    parsed = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsed.test(version)
}
exports.maxSatisfying = maxSatisfying
// Highest version in `versions` that satisfies `range`, or null.
function maxSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    // An invalid range matches nothing.
    return null
  }
  var max = null
  var maxSV = null
  versions.forEach(function (v) {
    if (!rangeObj.test(v)) {
      return
    }
    // Track the highest satisfying version seen so far.
    if (!max || maxSV.compare(v) === -1) {
      max = v
      maxSV = new SemVer(max, options)
    }
  })
  return max
}
exports.minSatisfying = minSatisfying
// Lowest version in `versions` that satisfies `range`, or null.
function minSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    // An invalid range matches nothing.
    return null
  }
  var min = null
  var minSV = null
  versions.forEach(function (v) {
    if (!rangeObj.test(v)) {
      return
    }
    // Track the lowest satisfying version seen so far.
    if (!min || minSV.compare(v) === 1) {
      min = v
      minSV = new SemVer(min, options)
    }
  })
  return min
}
exports.minVersion = minVersion
// Lowest version that can possibly satisfy `range`, or null.
function minVersion (range, loose) {
  range = new Range(range, loose)

  // Fast paths: the absolute lowest versions.
  var minver = new SemVer('0.0.0')
  if (range.test(minver)) {
    return minver
  }

  minver = new SemVer('0.0.0-0')
  if (range.test(minver)) {
    return minver
  }

  minver = null
  range.set.forEach(function (comparators) {
    comparators.forEach(function (comparator) {
      // Clone to avoid manipulating the comparator's semver object.
      var compver = new SemVer(comparator.semver.version)
      switch (comparator.operator) {
        case '>':
          // The smallest version strictly above X is the next patch,
          // or for a prerelease, the next prerelease identifier.
          if (compver.prerelease.length === 0) {
            compver.patch++
          } else {
            compver.prerelease.push(0)
          }
          compver.raw = compver.format()
          /* fallthrough */
        case '':
        case '>=':
          if (!minver || gt(minver, compver)) {
            minver = compver
          }
          break
        case '<':
        case '<=':
          /* Ignore maximum versions */
          break
        /* istanbul ignore next */
        default:
          throw new Error('Unexpected operation: ' + comparator.operator)
      }
    })
  })

  if (minver && range.test(minver)) {
    return minver
  }

  return null
}
exports.validRange = validRange
// Normalized string form of `range`, or null when it does not parse.
function validRange (range, options) {
  var parsed
  try {
    // This will throw if it's invalid anyway.
    parsed = new Range(range, options)
  } catch (er) {
    return null
  }
  // Return '*' instead of '' so that truthiness works.
  return parsed.range || '*'
}
// Determine if version is less than all the versions possible in the range.
exports.ltr = ltr
function ltr (version, range, options) {
  return outside(version, range, '<', options)
}
// Determine if version is greater than all the versions possible in the range.
exports.gtr = gtr
function gtr (version, range, options) {
  return outside(version, range, '>', options)
}
exports.outside = outside
// True when `version` lies entirely outside `range` on the `hilo` side
// ('>' means above every satisfiable version, '<' means below).
function outside (version, range, hilo, options) {
  version = new SemVer(version, options)
  range = new Range(range, options)

  // Pick comparison helpers so the rest of the code reads as if we're
  // in "gtr" mode; everything is flipped for the "ltr" case.
  var gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisfies the range it is not outside.
  if (satisfies(version, range, options)) {
    return false
  }

  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    var high = null
    var low = null

    // Find the highest and lowest comparator of this clause.
    comparators.forEach(function (comparator) {
      if (comparator.semver === ANY) {
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has an operator then our version
    // isn't outside it.
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range.
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  return true
}
exports.prerelease = prerelease
// Prerelease identifiers of `version`, or null when it has none or
// does not parse.
function prerelease (version, options) {
  var parsed = parse(version, options)
  if (parsed && parsed.prerelease.length) {
    return parsed.prerelease
  }
  return null
}
exports.intersects = intersects
// True when the two ranges share at least one satisfiable version.
function intersects (r1, r2, options) {
  var first = new Range(r1, options)
  var second = new Range(r2, options)
  return first.intersects(second)
}
exports.coerce = coerce
// Coerce a loosely-versioned string (e.g. 'v1.2', '1') into a SemVer,
// filling missing minor/patch with zeroes. Returns null when nothing
// version-like can be found.
function coerce (version) {
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  var match = version.match(re[COERCE])
  if (match == null) {
    return null
  }

  var major = match[1]
  var minor = match[2] || '0'
  var patch = match[3] || '0'
  return parse(major + '.' + minor + '.' + patch)
}
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
/***/ } ) ,
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
/***/ 914 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
"use strict" ;
// tslib re-export helper: binds m[k] onto o (as k2 when renamed).
// With Object.create available it installs a live getter so later
// changes to m[k] stay visible; otherwise it falls back to a copy.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// tslib helper: installs `v` as the `default` export of namespace
// object `o` (non-writable via defineProperty when available).
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
    o["default"] = v;
});
// tslib helper for `import * as ns`: real ES modules pass through
// unchanged; CommonJS exports are copied onto a fresh namespace object
// with the original module attached as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . getApiBaseUrl = exports . getProxyAgent = exports . getAuthString = void 0 ;
const httpClient = _ _importStar ( _ _webpack _require _ _ ( 925 ) ) ;
// Build the Authorization value for Octokit: exactly one of `token`
// and `options.auth` must be supplied. Throws otherwise.
function getAuthString(token, options) {
    const hasToken = Boolean(token);
    const hasAuth = Boolean(options.auth);
    if (!hasToken && !hasAuth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    if (hasToken && hasAuth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
exports . getAuthString = getAuthString ;
// Resolve an http(s) agent for `destinationUrl`, honoring the proxy
// environment variables, via the actions http-client.
function getProxyAgent(destinationUrl) {
    const client = new httpClient.HttpClient();
    return client.getAgent(destinationUrl);
}
exports . getProxyAgent = getProxyAgent ;
// GHES sets GITHUB_API_URL; fall back to the public GitHub API.
function getApiBaseUrl() {
    const fromEnv = process.env['GITHUB_API_URL'];
    return fromEnv || 'https://api.github.com';
}
exports . getApiBaseUrl = getApiBaseUrl ;
//# sourceMappingURL=utils.js.map
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
/***/ } ) ,
2019-11-24 16:44:26 +09:00
2020-08-22 14:57:57 +09:00
/***/ 925 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const url = _ _webpack _require _ _ ( 835 ) ;
const http = _ _webpack _require _ _ ( 605 ) ;
const https = _ _webpack _require _ _ ( 211 ) ;
const pm = _ _webpack _require _ _ ( 443 ) ;
let tunnel ;
var HttpCodes;
(function (HttpCodes) {
    // Mirrors TypeScript's numeric-enum emit: each entry gets both a
    // forward (name -> code) and a reverse (code -> name) mapping.
    var codes = {
        OK: 200,
        MultipleChoices: 300,
        MovedPermanently: 301,
        ResourceMoved: 302,
        SeeOther: 303,
        NotModified: 304,
        UseProxy: 305,
        SwitchProxy: 306,
        TemporaryRedirect: 307,
        PermanentRedirect: 308,
        BadRequest: 400,
        Unauthorized: 401,
        PaymentRequired: 402,
        Forbidden: 403,
        NotFound: 404,
        MethodNotAllowed: 405,
        NotAcceptable: 406,
        ProxyAuthenticationRequired: 407,
        RequestTimeout: 408,
        Conflict: 409,
        Gone: 410,
        TooManyRequests: 429,
        InternalServerError: 500,
        NotImplemented: 501,
        BadGateway: 502,
        ServiceUnavailable: 503,
        GatewayTimeout: 504
    };
    for (var key in codes) {
        HttpCodes[HttpCodes[key] = codes[key]] = key;
    }
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
    // String enum: only forward (name -> value) entries are emitted.
    Headers.Accept = "accept";
    Headers.ContentType = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    // String enum: only forward (name -> value) entries are emitted.
    MediaTypes.ApplicationJson = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
function getProxyUrl(serverUrl) {
    const proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
// Redirect statuses that HttpClient.request() will transparently follow.
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// Transient server errors that are worth retrying.
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only idempotent verbs are ever retried.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientResponse {
    /**
     * Wraps a Node incoming message so its body can be read as a string.
     * @param message The http.IncomingMessage (any readable emitting
     *                'data' Buffers followed by 'end' works).
     */
    constructor(message) {
        this.message = message;
    }
    /**
     * Buffers the entire response body and resolves it as a string.
     * @returns {Promise<string>} the full body text.
     */
    readBody() {
        // Fix: the original used an `async` Promise executor, which is an
        // anti-pattern — it never awaited anything, and a synchronous
        // throw inside an async executor is swallowed instead of
        // rejecting the promise. A plain executor behaves identically
        // for the happy path and propagates sync errors correctly.
        return new Promise((resolve) => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
        });
    }
}
exports . HttpClientResponse = HttpClientResponse ;
// True when the request URL uses the https scheme.
function isHttps(requestUrl) {
    // Legacy url.parse keeps the trailing colon on the protocol.
    const parsed = url.parse(requestUrl);
    return parsed.protocol === 'https:';
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
options ( requestUrl , additionalHeaders ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
}
get ( requestUrl , additionalHeaders ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
}
del ( requestUrl , additionalHeaders ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
}
head ( requestUrl , additionalHeaders ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that not found returns a null . Other errors ( 4 xx , 5 xx ) reject the promise
* /
async getJson ( requestUrl , additionalHeaders = { } ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
let res = await this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async postJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async putJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async patchJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
async request ( verb , requestUrl , data , headers ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
}
let parsedUrl = url . parse ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this . _allowRetries && RetryableHttpVerbs . indexOf ( verb ) != - 1
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
while ( numTries < maxTries ) {
response = await this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( let i = 0 ; i < this . handlers . length ; i ++ ) {
if ( this . handlers [ i ] . canHandleAuthentication ( response ) ) {
authenticationHandler = this . handlers [ i ] ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( HttpRedirectCodes . indexOf ( response . message . statusCode ) != - 1 &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
let parsedRedirectUrl = url . parse ( redirectUrl ) ;
if ( parsedUrl . protocol == 'https:' &&
parsedUrl . protocol != parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning response
// which will leak the open socket.
await response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( let header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
}
}
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = await this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( HttpResponseRetryCodes . indexOf ( response . message . statusCode ) == - 1 ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
await response . readBody ( ) ;
await this . _performExponentialBackoff ( numTries ) ;
}
}
return response ;
}
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return new Promise ( ( resolve , reject ) => {
let callbackForResult = function ( err , res ) {
if ( err ) {
reject ( err ) ;
}
resolve ( res ) ;
} ;
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
}
/**
 * Raw request with callback.
 * Sends the prepared request, invoking onResult exactly once with either
 * an error or an HttpClientResponse.
 * @param info - prepared request info (httpModule + options)
 * @param data - string body, readable stream, or null
 * @param onResult - callback(err, res); guaranteed to fire at most once
 */
requestRawWithCallback(info, data, onResult) {
    let socket;
    if (typeof data === 'string') {
        info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
    }
    // 'error' can fire after 'response' (or after a timeout); this guard
    // makes sure the caller's callback only ever runs once.
    let callbackCalled = false;
    let handleResult = (err, res) => {
        if (!callbackCalled) {
            callbackCalled = true;
            onResult(err, res);
        }
    };
    let req = info.httpModule.request(info.options, (msg) => {
        let res = new HttpClientResponse(msg);
        handleResult(null, res);
    });
    // keep a reference to the socket so the timeout handler can close it
    req.on('socket', sock => {
        socket = sock;
    });
    // If we ever get disconnected, we want the socket to timeout eventually
    // (default: 3 minutes when no socketTimeout was configured)
    req.setTimeout(this._socketTimeout || 3 * 60000, () => {
        if (socket) {
            // NOTE(review): end() half-closes the socket gracefully; verify a
            // hard destroy isn't needed to abort stalled connections.
            socket.end();
        }
        handleResult(new Error('Request timeout: ' + info.options.path), null);
    });
    req.on('error', function (err) {
        // err has statusCode property
        // res should have headers
        handleResult(err, null);
    });
    // string bodies are written directly; anything else is assumed to be a
    // readable stream and piped into the request
    if (data && typeof data === 'string') {
        req.write(data, 'utf8');
    }
    if (data && typeof data !== 'string') {
        data.on('close', function () {
            req.end();
        });
        data.pipe(req);
    }
    else {
        req.end();
    }
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
getAgent ( serverUrl ) {
let parsedUrl = url . parse ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
this . handlers . forEach ( handler => {
handler . prepareRequest ( info . options ) ;
} ) ;
}
return info ;
}
_mergeHeaders ( headers ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
/**
 * Picks (and caches) the agent for a request: a tunneling proxy agent when
 * the proxy env applies to this url, a keep-alive agent when keepAlive is
 * enabled, or the shared global agent otherwise.
 */
_getAgent(parsedUrl) {
    let agent;
    let proxyUrl = pm.getProxyUrl(parsedUrl);
    let useProxy = proxyUrl && proxyUrl.hostname;
    // reuse a previously-built agent when keepAlive is on
    if (this._keepAlive && useProxy) {
        agent = this._proxyAgent;
    }
    if (this._keepAlive && !useProxy) {
        agent = this._agent;
    }
    // if agent is already assigned use that agent.
    if (!!agent) {
        return agent;
    }
    const usingSsl = parsedUrl.protocol === 'https:';
    let maxSockets = 100;
    if (!!this.requestOptions) {
        maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
    }
    if (useProxy) {
        // If using proxy, need tunnel (lazy-loaded only when a proxy is in play)
        if (!tunnel) {
            tunnel = __webpack_require__(294);
        }
        const agentOptions = {
            maxSockets: maxSockets,
            keepAlive: this._keepAlive,
            proxy: {
                proxyAuth: proxyUrl.auth,
                host: proxyUrl.hostname,
                port: proxyUrl.port
            }
        };
        let tunnelAgent;
        // pick the tunnel flavor from the target protocol x proxy protocol
        const overHttps = proxyUrl.protocol === 'https:';
        if (usingSsl) {
            tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
        }
        else {
            tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
        }
        agent = tunnelAgent(agentOptions);
        this._proxyAgent = agent;
    }
    // if reusing agent across request and tunneling agent isn't assigned create a new agent
    if (this._keepAlive && !agent) {
        const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
        agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
        this._agent = agent;
    }
    // if not using private agent and tunnel agent isn't setup then use global agent
    if (!agent) {
        agent = usingSsl ? https.globalAgent : http.globalAgent;
    }
    if (usingSsl && this._ignoreSslError) {
        // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
        // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
        // we have to cast it to any and change it directly
        agent.options = Object.assign(agent.options || {}, {
            rejectUnauthorized: false
        });
    }
    return agent;
}
_performExponentialBackoff ( retryNumber ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
}
static dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
let a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
/**
 * Reads and JSON-parses the response body, resolving with a
 * { statusCode, result, headers } object; rejects with an Error carrying
 * statusCode (and result when parsed) for status codes above 299.
 */
async _processResponse(res, options) {
    return new Promise(async (resolve, reject) => {
        const statusCode = res.message.statusCode;
        const response = {
            statusCode: statusCode,
            result: null,
            headers: {}
        };
        // not found leads to null obj returned
        if (statusCode == HttpCodes.NotFound) {
            resolve(response);
        }
        // NOTE(review): there is no `return` after the resolve above, so the
        // body is still read for 404s; the later reject() is a no-op because
        // the promise is already settled.
        let obj;
        let contents;
        // get the result from the body
        try {
            contents = await res.readBody();
            if (contents && contents.length > 0) {
                if (options && options.deserializeDates) {
                    // revive ISO-like date strings into Date objects
                    obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                }
                else {
                    obj = JSON.parse(contents);
                }
                response.result = obj;
            }
            response.headers = res.message.headers;
        }
        catch (err) {
            // Invalid resource (contents not json); leaving result obj null
        }
        // note that 3xx redirects are handled by the http layer.
        if (statusCode > 299) {
            let msg;
            // if exception/error in body, attempt to get better error
            if (obj && obj.message) {
                msg = obj.message;
            }
            else if (contents && contents.length > 0) {
                // it may be the case that the exception is in the body message as string
                msg = contents;
            }
            else {
                msg = 'Failed request: (' + statusCode + ')';
            }
            let err = new Error(msg);
            // attach statusCode and body obj (if available) to the error object
            err['statusCode'] = statusCode;
            if (response.result) {
                err['result'] = response.result;
            }
            reject(err);
        }
        else {
            resolve(response);
        }
    });
}
}
exports . HttpClient = HttpClient ;
/***/ } ) ,
/***/ 931:
/***/ (function(module, __unusedexports, __webpack_require__) {

// signal-exit: run callbacks when the process exits, however it exits.
// Note: since nyc uses this module to output coverage, any lines
// that are in the direct sync flow of nyc's outputCoverage are
// ignored, since we can never get coverage for them.
var assert = __webpack_require__(357)
var signals = __webpack_require__(710)

var EE = __webpack_require__(614)
/* istanbul ignore if */
if (typeof EE !== 'function') {
  EE = EE.EventEmitter
}

// A single emitter is stashed on the global `process` object so that
// multiple loaded copies of this module share one set of hooks.
var emitter
if (process.__signal_exit_emitter__) {
  emitter = process.__signal_exit_emitter__
} else {
  emitter = process.__signal_exit_emitter__ = new EE()
  emitter.count = 0
  emitter.emitted = {}
}

// Because this emitter is a global, we have to check to see if a
// previous version of this library failed to enable infinite listeners.
// I know what you're about to say. But literally everything about
// signal-exit is a compromise with evil. Get used to it.
if (!emitter.infinite) {
  emitter.setMaxListeners(Infinity)
  emitter.infinite = true
}
// Register `cb` to run on process exit; returns a function that removes
// the listener again. With opts.alwaysLast the callback is attached to
// the 'afterexit' event instead, so it runs after ordinary handlers.
module.exports = function (cb, opts) {
  assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler')

  // install the process/signal hooks lazily, on first registration
  if (loaded === false) {
    load()
  }

  var ev = 'exit'
  if (opts && opts.alwaysLast) {
    ev = 'afterexit'
  }

  var remove = function () {
    emitter.removeListener(ev, cb)
    // once the last listener is gone, restore the patched process methods
    if (emitter.listeners('exit').length === 0 &&
      emitter.listeners('afterexit').length === 0) {
      unload()
    }
  }
  emitter.on(ev, cb)

  return remove
}
module.exports.unload = unload
// Undo everything load() did: detach our signal listeners and restore the
// original process.emit / process.reallyExit.
function unload () {
  if (!loaded) {
    return
  }
  loaded = false

  signals.forEach(function (sig) {
    try {
      process.removeListener(sig, sigListeners[sig])
    } catch (er) {}
  })
  process.emit = originalProcessEmit
  process.reallyExit = originalProcessReallyExit
  emitter.count -= 1
}
// Fire an exit/afterexit event at most once per event name, recording it
// in the shared `emitted` map so duplicates are suppressed globally.
function emit (event, code, signal) {
  if (!emitter.emitted[event]) {
    emitter.emitted[event] = true
    emitter.emit(event, code, signal)
  }
}
// { <signal>: <listener fn>, ... }
var sigListeners = {}
signals.forEach(function (sig) {
  sigListeners[sig] = function listener () {
    // If there are no other listeners, an exit is coming!
    // Simplest way: remove us and then re-send the signal.
    // We know that this will kill the process, so we can
    // safely emit now.
    var listeners = process.listeners(sig)
    if (listeners.length === emitter.count) {
      unload()
      emit('exit', null, sig)
      /* istanbul ignore next */
      emit('afterexit', null, sig)
      /* istanbul ignore next */
      process.kill(process.pid, sig)
    }
  }
})

module.exports.signals = function () {
  return signals
}

module.exports.load = load

// whether the process hooks are currently installed
var loaded = false
// Install the signal listeners and patch process.emit/process.reallyExit.
// Idempotent: does nothing if already loaded.
function load () {
  if (loaded) {
    return
  }
  loaded = true

  // This is the number of onSignalExit's that are in play.
  // It's important so that we can count the correct number of
  // listeners on signals, and don't wait for the other one to
  // handle it instead of us.
  emitter.count += 1

  // keep only the signals that could actually be hooked on this platform
  signals = signals.filter(function (sig) {
    try {
      process.on(sig, sigListeners[sig])
      return true
    } catch (er) {
      return false
    }
  })

  process.emit = processEmit
  process.reallyExit = processReallyExit
}
var originalProcessReallyExit = process.reallyExit
// Replacement for process.reallyExit: emits the exit/afterexit events
// before delegating to the saved original implementation.
function processReallyExit (code) {
  process.exitCode = code || 0
  emit('exit', process.exitCode, null)
  /* istanbul ignore next */
  emit('afterexit', process.exitCode, null)
  /* istanbul ignore next */
  originalProcessReallyExit.call(process, process.exitCode)
}

var originalProcessEmit = process.emit
// Replacement for process.emit: intercept 'exit' so our handlers run,
// delegating every event to the original emitter and preserving its
// return value.
function processEmit (ev, arg) {
  if (ev !== 'exit') {
    return originalProcessEmit.apply(this, arguments)
  }
  if (arg !== undefined) {
    process.exitCode = arg
  }
  var ret = originalProcessEmit.apply(this, arguments)
  emit('exit', process.exitCode, null)
  /* istanbul ignore next */
  emit('afterexit', process.exitCode, null)
  return ret
}
/***/ } ) ,
/***/ 932:
/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) {

// Entry module for the comment GitHub Action.
const { inspect } = __webpack_require__(669);
const core = __webpack_require__(186);
const github = __webpack_require__(438);

// Reaction content values accepted by the GitHub reactions API.
const REACTION_TYPES = [
  "+1",
  "-1",
  "laugh",
  "confused",
  "heart",
  "hooray",
  "rocket",
  "eyes",
];
2020-08-22 14:57:57 +09:00
// Adds reactions to an issue comment, skipping invalid and duplicate
// reaction types. Fails the action and returns false when no valid
// reactions remain after filtering.
// @param octokit     authenticated Octokit client
// @param repo        [owner, repo] pair
// @param comment_id  id of the comment to react to
// @param reactions   comma-separated reaction names (whitespace ignored)
async function addReactions(octokit, repo, comment_id, reactions) {
  let ReactionsSet = [
    ...new Set(
      reactions
        .replace(/\s/g, "")
        .split(",")
        .filter((item) => {
          if (!REACTION_TYPES.includes(item)) {
            core.info(`Skipping invalid reaction '${item}'.`);
            return false;
          }
          return true;
        })
    ),
  ];

  // Bug fix: the original tested `if (!ReactionsSet)`, which is always
  // false because an array is truthy — check the length instead so an
  // all-invalid input actually fails the action.
  if (ReactionsSet.length === 0) {
    core.setFailed(
      `No valid reactions are contained in '${reactions}'.`
    );
    return false;
  }

  // fire all reaction requests in parallel and collect every outcome
  let results = await Promise.allSettled(
    ReactionsSet.map(async (item) => {
      await octokit.reactions.createForIssueComment({
        owner: repo[0],
        repo: repo[1],
        comment_id: comment_id,
        content: item,
      });
      core.info(`Setting '${item}' reaction on comment.`);
    })
  );

  // log per-reaction success/failure (results align with ReactionsSet)
  for (let i = 0, l = results.length; i < l; i++) {
    if (results[i].status === "fulfilled") {
      core.info(
        `Added reaction '${ReactionsSet[i]}' to comment id '${comment_id}'.`
      );
    } else if (results[i].status === "rejected") {
      core.info(
        `Adding reaction '${ReactionsSet[i]}' to comment id '${comment_id}' failed with ${results[i].reason}.`
      );
    }
  }
  ReactionsSet = undefined;
  results = undefined;
}
// Main entrypoint: reads the action inputs and either updates an existing
// comment (when comment-id is given) or creates a new one (when
// issue-number is given), optionally adding reactions to it.
async function run() {
  try {
    const inputs = {
      token: core.getInput("token"),
      repository: core.getInput("repository"),
      issueNumber: core.getInput("issue-number"),
      commentId: core.getInput("comment-id"),
      body: core.getInput("body"),
      editMode: core.getInput("edit-mode"),
      // falls back to the older 'reaction-type' input when 'reactions'
      // is not set
      reactions: core.getInput("reactions")
        ? core.getInput("reactions")
        : core.getInput("reaction-type"),
    };
    core.debug(`Inputs: ${inspect(inputs)}`);

    // default to the repository the workflow is running in
    const repository = inputs.repository
      ? inputs.repository
      : process.env.GITHUB_REPOSITORY;
    const repo = repository.split("/");
    core.debug(`repository: ${repository}`);

    const editMode = inputs.editMode ? inputs.editMode : "append";
    core.debug(`editMode: ${editMode}`);
    if (!["append", "replace"].includes(editMode)) {
      core.setFailed(`Invalid edit-mode '${editMode}'.`);
      return;
    }

    const octokit = github.getOctokit(inputs.token);

    if (inputs.commentId) {
      // Edit a comment
      if (!inputs.body && !inputs.reactions) {
        core.setFailed("Missing either comment 'body' or 'reactions'.");
        return;
      }

      if (inputs.body) {
        var commentBody = "";
        if (editMode == "append") {
          // Get the comment body (the new body is appended to it)
          const { data: comment } = await octokit.issues.getComment({
            owner: repo[0],
            repo: repo[1],
            comment_id: inputs.commentId,
          });
          commentBody = comment.body + "\n";
        }

        commentBody = commentBody + inputs.body;
        core.debug(`Comment body: ${commentBody}`);
        await octokit.issues.updateComment({
          owner: repo[0],
          repo: repo[1],
          comment_id: inputs.commentId,
          body: commentBody,
        });
        core.info(`Updated comment id '${inputs.commentId}'.`);
        core.setOutput("comment-id", inputs.commentId);
      }

      // Set comment reactions
      if (inputs.reactions) {
        await addReactions(octokit, repo, inputs.commentId, inputs.reactions);
      }
    } else if (inputs.issueNumber) {
      // Create a comment
      if (!inputs.body) {
        core.setFailed("Missing comment 'body'.");
        return;
      }
      const { data: comment } = await octokit.issues.createComment({
        owner: repo[0],
        repo: repo[1],
        issue_number: inputs.issueNumber,
        body: inputs.body,
      });
      core.info(
        `Created comment id '${comment.id}' on issue '${inputs.issueNumber}'.`
      );
      core.setOutput("comment-id", comment.id);
      // Set comment reactions
      if (inputs.reactions) {
        await addReactions(octokit, repo, comment.id, inputs.reactions);
      }
    } else {
      core.setFailed("Missing either 'issue-number' or 'comment-id'.");
      return;
    }
  } catch (error) {
    core.debug(inspect(error));
    core.setFailed(error.message);
  }
}
run();
/***/ } ) ,
/***/ 940:
/***/ (function(module) {

// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module.exports = wrappy
// Wrap `fn`, retaining its own properties on the wrapper and carrying
// the callback's own properties onto whatever callback fn returns.
function wrappy (fn, cb) {
  // two-argument form: wrappy(fn, cb) === wrappy(fn)(cb)
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  function wrapper () {
    var args = []
    for (var i = 0; i < arguments.length; i++) {
      args.push(arguments[i])
    }
    var result = fn.apply(this, args)
    var lastArg = args[args.length - 1]
    // if fn produced a replacement callback, keep the decorations that
    // were attached to the callback it replaced
    if (typeof result === 'function' && result !== lastArg) {
      Object.keys(lastArg).forEach(function (k) {
        result[k] = lastArg[k]
      })
    }
    return result
  }

  // own properties of fn are retained on the wrapper
  Object.keys(fn).forEach(function (k) {
    wrapper[k] = fn[k]
  })

  return wrapper
}
/***/ } )
/******/ } ) ;