'use strict';

// Bundled-plugin dependency table (rollup CJS output). The `require$$N`
// aliases are referenced by the vendored modules below.
var obsidian = require('obsidian');
var require$$0 = require('fs');
var process$2 = require('node:process');
var require$$0$1 = require('path');
var require$$0$2 = require('child_process');
var require$$0$3 = require('os');
var require$$0$4 = require('assert');
var require$$2 = require('events');
var require$$0$6 = require('buffer');
var require$$0$5 = require('stream');
var require$$2$1 = require('util');
var node_os = require('node:os');
require('electron'); // imported for side effects only
var node_buffer = require('node:buffer');
// Resolve a reference to the global object across environments, preferring
// the standard `globalThis`, then browser `window`, Node `global`, and
// worker `self`; falls back to a fresh object as a last resort.
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
// Unwrap an ES-module-flavored CJS namespace: when `x` was transpiled from
// ESM (`__esModule` marker plus an own `default`), return its default
// export; otherwise return `x` unchanged.
function getDefaultExportFromCjs(x) {
  return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
// Guard used by every posix path method: reject non-string inputs early
// with a descriptive TypeError.
function assertPath(path) {
  if (typeof path !== 'string') {
    throw new TypeError('Path must be a string. Received ' + JSON.stringify(path));
  }
}
// Resolves . and .. elements in a path with directory names.
// Scans the string once, tracking the current run of dots and the index of
// the last '/'; `allowAboveRoot` keeps leading '..' segments instead of
// dropping them. Char code 47 is '/', 46 is '.'.
function normalizeStringPosix(path, allowAboveRoot) {
  var res = '';
  var lastSegmentLength = 0;
  var lastSlash = -1;
  var dots = 0; // count of consecutive dots in the current segment; -1 means "not a dots-only segment"
  var code;
  for (var i = 0; i <= path.length; ++i) {
    if (i < path.length)
      code = path.charCodeAt(i);
    else if (code === 47 /*/*/)
      break;
    else
      code = 47 /*/*/; // treat end-of-string as a virtual separator
    if (code === 47 /*/*/) {
      if (lastSlash === i - 1 || dots === 1) ; // empty or '.' segment: skip
      else if (lastSlash !== i - 1 && dots === 2) {
        // '..' segment: pop the previous segment unless it is itself '..'
        if (res.length < 2 || lastSegmentLength !== 2 || res.charCodeAt(res.length - 1) !== 46 /*.*/ || res.charCodeAt(res.length - 2) !== 46 /*.*/) {
          if (res.length > 2) {
            var lastSlashIndex = res.lastIndexOf('/');
            if (lastSlashIndex !== res.length - 1) {
              if (lastSlashIndex === -1) {
                res = '';
                lastSegmentLength = 0;
              } else {
                res = res.slice(0, lastSlashIndex);
                lastSegmentLength = res.length - 1 - res.lastIndexOf('/');
              }
              lastSlash = i;
              dots = 0;
              continue;
            }
          } else if (res.length === 2 || res.length === 1) {
            res = '';
            lastSegmentLength = 0;
            lastSlash = i;
            dots = 0;
            continue;
          }
        }
        // Nothing to pop: keep the '..' when climbing above root is allowed.
        if (allowAboveRoot) {
          if (res.length > 0)
            res += '/..';
          else
            res = '..';
          lastSegmentLength = 2;
        }
      } else {
        // Ordinary segment: append it.
        if (res.length > 0)
          res += '/' + path.slice(lastSlash + 1, i);
        else
          res = path.slice(lastSlash + 1, i);
        lastSegmentLength = i - lastSlash - 1;
      }
      lastSlash = i;
      dots = 0;
    } else if (code === 46 /*.*/ && dots !== -1) {
      ++dots;
    } else {
      dots = -1;
    }
  }
  return res;
}
// Assemble a path string from a parsed path object (the inverse of
// `parse`): `dir` (or `root`) + separator + `base`, where `base` falls back
// to `name` + `ext`. When dir equals root, no separator is inserted.
function _format(sep, pathObject) {
  var dir = pathObject.dir || pathObject.root;
  var base = pathObject.base || (pathObject.name || '') + (pathObject.ext || '');
  if (!dir) {
    return base;
  }
  if (dir === pathObject.root) {
    return dir + base;
  }
  return dir + sep + base;
}
// Pure-JS implementation of Node's `path.posix` API (path-browserify).
// Char code 47 is '/', 46 is '.'. Relies on the assertPath /
// normalizeStringPosix / _format helpers defined above.
var posix = {
  // path.resolve([from ...], to)
  resolve: function resolve() {
    var resolvedPath = '';
    var resolvedAbsolute = false;
    var cwd;
    // Walk arguments right-to-left, prepending, until the result becomes
    // absolute; i === -1 falls back to process.cwd().
    for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {
      var path;
      if (i >= 0)
        path = arguments[i];
      else {
        if (cwd === undefined)
          cwd = process.cwd();
        path = cwd;
      }
      assertPath(path);
      // Skip empty entries
      if (path.length === 0) {
        continue;
      }
      resolvedPath = path + '/' + resolvedPath;
      resolvedAbsolute = path.charCodeAt(0) === 47 /*/*/;
    }
    // At this point the path should be resolved to a full absolute path, but
    // handle relative paths to be safe (might happen when process.cwd() fails)

    // Normalize the path
    resolvedPath = normalizeStringPosix(resolvedPath, !resolvedAbsolute);
    if (resolvedAbsolute) {
      if (resolvedPath.length > 0)
        return '/' + resolvedPath;
      else
        return '/';
    } else if (resolvedPath.length > 0) {
      return resolvedPath;
    } else {
      return '.';
    }
  },

  normalize: function normalize(path) {
    assertPath(path);
    if (path.length === 0) return '.';
    var isAbsolute = path.charCodeAt(0) === 47 /*/*/;
    var trailingSeparator = path.charCodeAt(path.length - 1) === 47 /*/*/;

    // Normalize the path
    path = normalizeStringPosix(path, !isAbsolute);

    if (path.length === 0 && !isAbsolute) path = '.';
    if (path.length > 0 && trailingSeparator) path += '/';

    if (isAbsolute) return '/' + path;
    return path;
  },

  isAbsolute: function isAbsolute(path) {
    assertPath(path);
    return path.length > 0 && path.charCodeAt(0) === 47 /*/*/;
  },

  join: function join() {
    if (arguments.length === 0)
      return '.';
    var joined;
    for (var i = 0; i < arguments.length; ++i) {
      var arg = arguments[i];
      assertPath(arg);
      if (arg.length > 0) {
        if (joined === undefined)
          joined = arg;
        else
          joined += '/' + arg;
      }
    }
    if (joined === undefined)
      return '.';
    return posix.normalize(joined);
  },

  relative: function relative(from, to) {
    assertPath(from);
    assertPath(to);

    if (from === to) return '';

    from = posix.resolve(from);
    to = posix.resolve(to);

    if (from === to) return '';

    // Trim any leading separators
    var fromStart = 1;
    for (; fromStart < from.length; ++fromStart) {
      if (from.charCodeAt(fromStart) !== 47 /*/*/)
        break;
    }
    var fromEnd = from.length;
    var fromLen = fromEnd - fromStart;

    // Trim any leading separators
    var toStart = 1;
    for (; toStart < to.length; ++toStart) {
      if (to.charCodeAt(toStart) !== 47 /*/*/)
        break;
    }
    var toEnd = to.length;
    var toLen = toEnd - toStart;

    // Compare paths to find the longest common path from root
    var length = fromLen < toLen ? fromLen : toLen;
    var lastCommonSep = -1;
    var i = 0;
    for (; i <= length; ++i) {
      if (i === length) {
        if (toLen > length) {
          if (to.charCodeAt(toStart + i) === 47 /*/*/) {
            // We get here if `from` is the exact base path for `to`.
            // For example: from='/foo/bar'; to='/foo/bar/baz'
            return to.slice(toStart + i + 1);
          } else if (i === 0) {
            // We get here if `from` is the root
            // For example: from='/'; to='/foo'
            return to.slice(toStart + i);
          }
        } else if (fromLen > length) {
          if (from.charCodeAt(fromStart + i) === 47 /*/*/) {
            // We get here if `to` is the exact base path for `from`.
            // For example: from='/foo/bar/baz'; to='/foo/bar'
            lastCommonSep = i;
          } else if (i === 0) {
            // We get here if `to` is the root.
            // For example: from='/foo'; to='/'
            lastCommonSep = 0;
          }
        }
        break;
      }
      var fromCode = from.charCodeAt(fromStart + i);
      var toCode = to.charCodeAt(toStart + i);
      if (fromCode !== toCode)
        break;
      else if (fromCode === 47 /*/*/)
        lastCommonSep = i;
    }

    var out = '';
    // Generate the relative path based on the path difference between `to`
    // and `from`
    for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) {
      if (i === fromEnd || from.charCodeAt(i) === 47 /*/*/) {
        if (out.length === 0)
          out += '..';
        else
          out += '/..';
      }
    }

    // Lastly, append the rest of the destination (`to`) path that comes after
    // the common path parts
    if (out.length > 0)
      return out + to.slice(toStart + lastCommonSep);
    else {
      toStart += lastCommonSep;
      if (to.charCodeAt(toStart) === 47 /*/*/)
        ++toStart;
      return to.slice(toStart);
    }
  },

  // No-op on posix; exists for API parity with the win32 implementation.
  _makeLong: function _makeLong(path) {
    return path;
  },

  dirname: function dirname(path) {
    assertPath(path);
    if (path.length === 0) return '.';
    var code = path.charCodeAt(0);
    var hasRoot = code === 47 /*/*/;
    var end = -1;
    var matchedSlash = true;
    // Walk backwards to the separator preceding the last path component,
    // skipping any trailing separators first.
    for (var i = path.length - 1; i >= 1; --i) {
      code = path.charCodeAt(i);
      if (code === 47 /*/*/) {
        if (!matchedSlash) {
          end = i;
          break;
        }
      } else {
        // We saw the first non-path separator
        matchedSlash = false;
      }
    }

    if (end === -1) return hasRoot ? '/' : '.';
    if (hasRoot && end === 1) return '//';
    return path.slice(0, end);
  },

  basename: function basename(path, ext) {
    if (ext !== undefined && typeof ext !== 'string') throw new TypeError('"ext" argument must be a string');
    assertPath(path);

    var start = 0;
    var end = -1;
    var matchedSlash = true;
    var i;

    if (ext !== undefined && ext.length > 0 && ext.length <= path.length) {
      if (ext.length === path.length && ext === path) return '';
      var extIdx = ext.length - 1;
      var firstNonSlashEnd = -1;
      for (i = path.length - 1; i >= 0; --i) {
        var code = path.charCodeAt(i);
        if (code === 47 /*/*/) {
          // If we reached a path separator that was not part of a set of path
          // separators at the end of the string, stop now
          if (!matchedSlash) {
            start = i + 1;
            break;
          }
        } else {
          if (firstNonSlashEnd === -1) {
            // We saw the first non-path separator, remember this index in case
            // we need it if the extension ends up not matching
            matchedSlash = false;
            firstNonSlashEnd = i + 1;
          }
          if (extIdx >= 0) {
            // Try to match the explicit extension
            if (code === ext.charCodeAt(extIdx)) {
              if (--extIdx === -1) {
                // We matched the extension, so mark this as the end of our path
                // component
                end = i;
              }
            } else {
              // Extension does not match, so our result is the entire path
              // component
              extIdx = -1;
              end = firstNonSlashEnd;
            }
          }
        }
      }

      if (start === end) end = firstNonSlashEnd; else if (end === -1) end = path.length;
      return path.slice(start, end);
    } else {
      for (i = path.length - 1; i >= 0; --i) {
        if (path.charCodeAt(i) === 47 /*/*/) {
          // If we reached a path separator that was not part of a set of path
          // separators at the end of the string, stop now
          if (!matchedSlash) {
            start = i + 1;
            break;
          }
        } else if (end === -1) {
          // We saw the first non-path separator, mark this as the end of our
          // path component
          matchedSlash = false;
          end = i + 1;
        }
      }

      if (end === -1) return '';
      return path.slice(start, end);
    }
  },

  extname: function extname(path) {
    assertPath(path);
    var startDot = -1;
    var startPart = 0;
    var end = -1;
    var matchedSlash = true;
    // Track the state of characters (if any) we see before our first dot and
    // after any path separator we find
    var preDotState = 0;
    for (var i = path.length - 1; i >= 0; --i) {
      var code = path.charCodeAt(i);
      if (code === 47 /*/*/) {
        // If we reached a path separator that was not part of a set of path
        // separators at the end of the string, stop now
        if (!matchedSlash) {
          startPart = i + 1;
          break;
        }
        continue;
      }
      if (end === -1) {
        // We saw the first non-path separator, mark this as the end of our
        // extension
        matchedSlash = false;
        end = i + 1;
      }
      if (code === 46 /*.*/) {
        // If this is our first dot, mark it as the start of our extension
        if (startDot === -1)
          startDot = i;
        else if (preDotState !== 1)
          preDotState = 1;
      } else if (startDot !== -1) {
        // We saw a non-dot and non-path separator before our dot, so we should
        // have a good chance at having a non-empty extension
        preDotState = -1;
      }
    }

    if (startDot === -1 || end === -1 ||
      // We saw a non-dot character immediately before the dot
      preDotState === 0 ||
      // The (right-most) trimmed path component is exactly '..'
      preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {
      return '';
    }
    return path.slice(startDot, end);
  },

  format: function format(pathObject) {
    if (pathObject === null || typeof pathObject !== 'object') {
      throw new TypeError('The "pathObject" argument must be of type Object. Received type ' + typeof pathObject);
    }
    return _format('/', pathObject);
  },

  parse: function parse(path) {
    assertPath(path);

    var ret = { root: '', dir: '', base: '', ext: '', name: '' };
    if (path.length === 0) return ret;
    var code = path.charCodeAt(0);
    var isAbsolute = code === 47 /*/*/;
    var start;
    if (isAbsolute) {
      ret.root = '/';
      start = 1;
    } else {
      start = 0;
    }
    var startDot = -1;
    var startPart = 0;
    var end = -1;
    var matchedSlash = true;
    var i = path.length - 1;

    // Track the state of characters (if any) we see before our first dot and
    // after any path separator we find
    var preDotState = 0;

    // Get non-dir info
    for (; i >= start; --i) {
      code = path.charCodeAt(i);
      if (code === 47 /*/*/) {
        // If we reached a path separator that was not part of a set of path
        // separators at the end of the string, stop now
        if (!matchedSlash) {
          startPart = i + 1;
          break;
        }
        continue;
      }
      if (end === -1) {
        // We saw the first non-path separator, mark this as the end of our
        // extension
        matchedSlash = false;
        end = i + 1;
      }
      if (code === 46 /*.*/) {
        // If this is our first dot, mark it as the start of our extension
        if (startDot === -1) startDot = i; else if (preDotState !== 1) preDotState = 1;
      } else if (startDot !== -1) {
        // We saw a non-dot and non-path separator before our dot, so we should
        // have a good chance at having a non-empty extension
        preDotState = -1;
      }
    }

    if (startDot === -1 || end === -1 ||
      // We saw a non-dot character immediately before the dot
      preDotState === 0 ||
      // The (right-most) trimmed path component is exactly '..'
      preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {
      if (end !== -1) {
        if (startPart === 0 && isAbsolute) ret.base = ret.name = path.slice(1, end); else ret.base = ret.name = path.slice(startPart, end);
      }
    } else {
      if (startPart === 0 && isAbsolute) {
        ret.name = path.slice(1, startDot);
        ret.base = path.slice(1, end);
      } else {
        ret.name = path.slice(startPart, startDot);
        ret.base = path.slice(startPart, end);
      }
      ret.ext = path.slice(startDot, end);
    }

    if (startPart > 0) ret.dir = path.slice(0, startPart - 1); else if (isAbsolute) ret.dir = '/';

    return ret;
  },

  sep: '/',
  delimiter: ':',
  win32: null,
  posix: null
};
posix.posix = posix;

var pathBrowserify = posix;
// CJS module shells populated by the vendored execa / cross-spawn code below.
var execa$2 = { exports: {} };
var crossSpawn$1 = { exports: {} };
// Lazily-initialized Windows implementation of `isexe`: on Windows a file
// counts as executable when its extension appears in PATHEXT (mode bits are
// meaningless there). The built module is memoized in `windows`.
var windows;
var hasRequiredWindows;
function requireWindows() {
  if (hasRequiredWindows) return windows;
  hasRequiredWindows = 1;
  var fs = require$$0;

  // True when `filePath` ends with one of the PATHEXT extensions. An unset
  // PATHEXT, or one containing an empty entry, matches everything.
  function checkPathExt(filePath, options) {
    var pathext = options.pathExt !== undefined ? options.pathExt : process.env.PATHEXT;
    if (!pathext) {
      return true;
    }
    var parts = pathext.split(';');
    if (parts.indexOf('') !== -1) {
      return true;
    }
    for (var i = 0; i < parts.length; i++) {
      var candidate = parts[i].toLowerCase();
      if (candidate && filePath.substr(-candidate.length).toLowerCase() === candidate) {
        return true;
      }
    }
    return false;
  }

  // Only regular files and symlinks can qualify as executables.
  function checkStat(stat, filePath, options) {
    if (!stat.isSymbolicLink() && !stat.isFile()) {
      return false;
    }
    return checkPathExt(filePath, options);
  }

  // Async form: stat the path and report the result via callback.
  function isexe(filePath, options, cb) {
    fs.stat(filePath, function (er, stat) {
      cb(er, er ? false : checkStat(stat, filePath, options));
    });
  }

  // Sync form: propagates fs.statSync errors.
  function sync(filePath, options) {
    return checkStat(fs.statSync(filePath), filePath, options);
  }

  isexe.sync = sync;
  windows = isexe;
  return windows;
}
// Lazily-initialized POSIX implementation of `isexe`: a file is executable
// when it is a regular file whose mode bits grant execute permission to the
// current uid/gid (or when running as root). Memoized in `mode`.
var mode;
var hasRequiredMode;
function requireMode() {
  if (hasRequiredMode) return mode;
  hasRequiredMode = 1;
  var fs = require$$0;

  // Async form: stat the path and report the result via callback.
  function isexe(filePath, options, cb) {
    fs.stat(filePath, function (er, stat) {
      cb(er, er ? false : checkStat(stat, options));
    });
  }

  // Sync form: propagates fs.statSync errors.
  function sync(filePath, options) {
    return checkStat(fs.statSync(filePath), options);
  }

  function checkStat(stat, options) {
    return stat.isFile() && checkMode(stat, options);
  }

  // Evaluate the classic rwx execute bits against our effective ids.
  function checkMode(stat, options) {
    var mod = stat.mode;
    var uid = stat.uid;
    var gid = stat.gid;

    var myUid = options.uid !== undefined ? options.uid : process.getuid && process.getuid();
    var myGid = options.gid !== undefined ? options.gid : process.getgid && process.getgid();

    var u = parseInt('100', 8); // owner execute bit
    var g = parseInt('010', 8); // group execute bit
    var o = parseInt('001', 8); // other execute bit
    var ug = u | g;

    // Executable if: anyone may execute, our group may, we own it and the
    // owner may, or we are root and any execute bit is set.
    return (mod & o) ||
      (mod & g) && gid === myGid ||
      (mod & u) && uid === myUid ||
      (mod & ug) && myUid === 0;
  }

  mode = isexe;
  isexe.sync = sync;
  return mode;
}
// Pick the platform-appropriate isexe implementation. TESTING_WINDOWS lets
// the test suite force the Windows code path on other platforms.
var core$1;
if (process.platform === 'win32' || commonjsGlobal.TESTING_WINDOWS) {
  core$1 = requireWindows();
} else {
  core$1 = requireMode();
}
// Public isexe entry point: dispatches to the platform core, normalizes the
// (options, cb) overloads, and returns a Promise when no callback is given.
// EACCES (or any error with options.ignoreErrors) is reported as "not
// executable" rather than as an error.
var isexe_1 = isexe$1;
isexe$1.sync = sync;

function isexe$1(path, options, cb) {
  if (typeof options === 'function') {
    cb = options;
    options = {};
  }

  if (!cb) {
    if (typeof Promise !== 'function') {
      throw new TypeError('callback not provided');
    }
    return new Promise(function (resolve, reject) {
      isexe$1(path, options || {}, function (er, is) {
        if (er) {
          reject(er);
        } else {
          resolve(is);
        }
      });
    });
  }

  core$1(path, options || {}, function (er, is) {
    // ignore EACCES because that just means we aren't allowed to run it
    if (er) {
      if (er.code === 'EACCES' || options && options.ignoreErrors) {
        er = null;
        is = false;
      }
    }
    cb(er, is);
  });
}

// Synchronous variant with the same EACCES/ignoreErrors softening.
function sync(path, options) {
  // my kingdom for a filtered catch
  try {
    return core$1.sync(path, options || {});
  } catch (er) {
    if (options && options.ignoreErrors || er.code === 'EACCES') {
      return false;
    } else {
      throw er;
    }
  }
}
// Platform detection and shared constants for the vendored `which`.
const isWindows = process.platform === 'win32' ||
  process.env.OSTYPE === 'cygwin' ||
  process.env.OSTYPE === 'msys';

const path$3 = require$$0$1;
const COLON = isWindows ? ';' : ':';
const isexe = isexe_1;
// Build the ENOENT-coded error that `which` throws/rejects with.
const getNotFoundError = (cmd) =>
  Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' });

// Compute the directories and extensions to search for `cmd`.
const getPathInfo = (cmd, opt) => {
  const colon = opt.colon || COLON;

  // If it has a slash, then we don't bother searching the pathenv.
  // just check the file itself, and that's it.
  const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? ['']
    : (
      [
        // windows always checks the cwd first
        ...(isWindows ? [process.cwd()] : []),
        ...(opt.path || process.env.PATH ||
          /* istanbul ignore next: very unusual */ '').split(colon),
      ]
    );
  const pathExtExe = isWindows
    ? opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM'
    : '';
  const pathExt = isWindows ? pathExtExe.split(colon) : [''];

  if (isWindows) {
    // A command that already contains a dot may be a full filename; try it
    // verbatim before appending extensions.
    if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
      pathExt.unshift('');
  }

  return {
    pathEnv,
    pathExt,
    pathExtExe,
  };
};
// Async `which`: walk each PATH entry (and each PATHEXT extension on
// Windows) until an executable match is found. Resolves with the first
// match — or every match when `opt.all` is set — and rejects with an
// ENOENT-coded error otherwise. Also usable callback-style.
const which$1 = (cmd, opt, cb) => {
  if (typeof opt === 'function') {
    cb = opt;
    opt = {};
  }
  if (!opt)
    opt = {};

  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
  const found = [];

  // Try the i-th PATH entry.
  const step = i => new Promise((resolve, reject) => {
    if (i === pathEnv.length) {
      return opt.all && found.length ? resolve(found)
        : reject(getNotFoundError(cmd));
    }

    const raw = pathEnv[i];
    // Strip surrounding double quotes that some Windows PATH entries carry.
    const dir = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw;
    const joined = path$3.join(dir, cmd);
    // Preserve an explicit `./` / `.\` prefix that path.join would drop.
    const candidate = !dir && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + joined
      : joined;

    resolve(subStep(candidate, i, 0));
  });

  // Try the ii-th extension for the candidate built from PATH entry i.
  const subStep = (p, i, ii) => new Promise((resolve, reject) => {
    if (ii === pathExt.length) {
      return resolve(step(i + 1));
    }
    const ext = pathExt[ii];
    isexe(p + ext, { pathExt: pathExtExe }, (er, is) => {
      if (!er && is) {
        if (opt.all) {
          found.push(p + ext);
        } else {
          return resolve(p + ext);
        }
      }
      return resolve(subStep(p, i, ii + 1));
    });
  });

  return cb ? step(0).then(res => cb(null, res), cb) : step(0);
};
// Synchronous `which`: same search as which$1, returning the first match
// (or all matches with `opt.all`). With `opt.nothrow` it returns null
// instead of throwing when nothing is found.
const whichSync = (cmd, opt) => {
  opt = opt || {};

  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
  const found = [];

  for (let i = 0; i < pathEnv.length; i++) {
    const raw = pathEnv[i];
    // Strip surrounding double quotes that some Windows PATH entries carry.
    const dir = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw;
    const joined = path$3.join(dir, cmd);
    // Preserve an explicit `./` / `.\` prefix that path.join would drop.
    const candidate = !dir && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + joined
      : joined;

    for (let j = 0; j < pathExt.length; j++) {
      const cur = candidate + pathExt[j];
      try {
        if (isexe.sync(cur, { pathExt: pathExtExe })) {
          if (opt.all) {
            found.push(cur);
          } else {
            return cur;
          }
        }
      } catch (ex) {
        // A stat failure just means this candidate is not a match.
      }
    }
  }

  if (opt.all && found.length)
    return found;
  if (opt.nothrow)
    return null;
  throw getNotFoundError(cmd);
};
// Export surface: which(cmd, [opt], [cb]) with a .sync companion.
var which_1 = which$1;
which$1.sync = whichSync;
var pathKey$1 = { exports: {} };

// Return the name of the PATH environment variable for a platform. Windows
// environments vary the casing ("Path", "PATH", ...), so pick the last key
// that case-insensitively equals "PATH", defaulting to "Path"; every other
// platform uses exactly "PATH".
const pathKey = (options = {}) => {
  const environment = options.env || process.env;
  const platform = options.platform || process.platform;

  if (platform !== 'win32') {
    return 'PATH';
  }

  const keys = Object.keys(environment).reverse();
  return keys.find(key => key.toUpperCase() === 'PATH') || 'Path';
};

pathKey$1.exports = pathKey;
// TODO: Remove this for the next major release
pathKey$1.exports.default = pathKey;

var pathKeyExports = pathKey$1.exports;
const path$2 = require$$0$1;
const which = which_1;
const getPathKey = pathKeyExports;

// Resolve `parsed.command` to an absolute executable path, honoring a custom
// `cwd` by temporarily chdir-ing (since `which` stats relative to the
// process cwd). Returns undefined when the command cannot be found.
function resolveCommandAttempt(parsed, withoutPathExt) {
  const env = parsed.options.env || process.env;
  const cwd = process.cwd();
  const hasCustomCwd = parsed.options.cwd != null;
  // Worker threads do not have process.chdir()
  const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled;

  // If a custom `cwd` was specified, we need to change the process cwd
  // because `which` will do stat calls but does not support a custom cwd
  if (shouldSwitchCwd) {
    try {
      process.chdir(parsed.options.cwd);
    } catch (err) {
      /* Empty */
    }
  }

  let resolved;

  try {
    resolved = which.sync(parsed.command, {
      path: env[getPathKey({ env })],
      pathExt: withoutPathExt ? path$2.delimiter : undefined,
    });
  } catch (e) {
    /* Empty */
  } finally {
    // Always restore the original cwd, even when resolution failed.
    if (shouldSwitchCwd) {
      process.chdir(cwd);
    }
  }

  // If we successfully resolved, ensure that an absolute path is returned
  // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
  if (resolved) {
    resolved = path$2.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
  }

  return resolved;
}

// First try with PATHEXT support, then retry with exact-name matching.
function resolveCommand$1(parsed) {
  return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
}

var resolveCommand_1 = resolveCommand$1;
var _escape = {};

// See http://www.robvanderwoude.com/escapechars.php
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;

// Escape a command name for cmd.exe by caret-escaping its meta characters.
function escapeCommand(arg) {
  // Escape meta chars
  arg = arg.replace(metaCharsRegExp, '^$1');

  return arg;
}

// Escape an argument for cmd.exe: double the backslash runs that precede a
// quote or the end of the string, wrap in double quotes, then caret-escape
// meta characters (twice for cmd-shim targets, whose first cmd.exe pass
// consumes one level of carets).
function escapeArgument(arg, doubleEscapeMetaChars) {
  // Convert to string
  arg = `${arg}`;

  // Algorithm below is based on https://qntm.org/cmd

  // Sequence of backslashes followed by a double quote:
  // double up all the backslashes and escape the double quote
  arg = arg.replace(/(\\*)"/g, '$1$1\\"');

  // Sequence of backslashes followed by the end of the string
  // (which will become a double quote later):
  // double up all the backslashes
  arg = arg.replace(/(\\*)$/, '$1$1');

  // All other backslashes occur literally

  // Quote the whole thing:
  arg = `"${arg}"`;

  // Escape meta chars
  arg = arg.replace(metaCharsRegExp, '^$1');

  // Double escape meta chars if necessary
  if (doubleEscapeMetaChars) {
    arg = arg.replace(metaCharsRegExp, '^$1');
  }

  return arg;
}

_escape.command = escapeCommand;
_escape.argument = escapeArgument;
var shebangRegex$1 = /^#!(.*)/;

const shebangRegex = shebangRegex$1;

// Extract the command a shebang line runs: 'node' for
// '#!/usr/bin/env node', 'bash' for '#!/bin/bash', 'python -u' for
// '#!/usr/bin/python -u'. Returns null when the string has no shebang.
var shebangCommand$1 = (string = '') => {
  const match = string.match(shebangRegex);

  if (!match) {
    return null;
  }

  // Split the interpreter path from a single optional argument.
  const [path, argument] = match[0].replace(/#! ?/, '').split(' ');
  const binary = path.split('/').pop();

  // `env` is just a launcher; the real command is its argument.
  if (binary === 'env') {
    return argument;
  }

  return argument ? `${binary} ${argument}` : binary;
};
const fs = require$$0;

const shebangCommand = shebangCommand$1;

// Read the interpreter from `command`'s shebang line, if any (null
// otherwise). Only the first 150 bytes are inspected, and read errors are
// treated as "no shebang".
function readShebang$1(command) {
  // Read the first 150 bytes from the file
  const size = 150;
  const buffer = Buffer.alloc(size);

  let fd;

  try {
    fd = fs.openSync(command, 'r');
    fs.readSync(fd, buffer, 0, size, 0);
    fs.closeSync(fd);
  } catch (e) { /* Empty */ }

  // Attempt to extract shebang (null is returned if not a shebang)
  return shebangCommand(buffer.toString());
}

var readShebang_1 = readShebang$1;
const path$1 = require$$0$1;
const resolveCommand = resolveCommand_1;
const escape = _escape;
const readShebang = readShebang_1;

const isWin$2 = process.platform === 'win32';
const isExecutableRegExp = /\.(?:com|exe)$/i;
const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;

// If the resolved file starts with a shebang, re-target the spawn at the
// interpreter, pushing the script itself as the first argument.
function detectShebang(parsed) {
  parsed.file = resolveCommand(parsed);

  const shebang = parsed.file && readShebang(parsed.file);

  if (shebang) {
    parsed.args.unshift(parsed.file);
    parsed.command = shebang;

    return resolveCommand(parsed);
  }

  return parsed.file;
}

// Windows-only preparation: route non-.exe/.com commands through cmd.exe
// with full meta-character escaping. A no-op on other platforms.
function parseNonShell(parsed) {
  if (!isWin$2) {
    return parsed;
  }

  // Detect & add support for shebangs
  const commandFile = detectShebang(parsed);

  // We don't need a shell if the command filename is an executable
  const needsShell = !isExecutableRegExp.test(commandFile);

  // If a shell is required, use cmd.exe and take care of escaping everything correctly
  // Note that `forceShell` is an hidden option used only in tests
  if (parsed.options.forceShell || needsShell) {
    // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
    // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument
    // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
    // we need to double escape them
    const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);

    // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
    // This is necessary otherwise it will always fail with ENOENT in those cases
    parsed.command = path$1.normalize(parsed.command);

    // Escape command & arguments
    parsed.command = escape.command(parsed.command);
    parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));

    const shellCommand = [parsed.command].concat(parsed.args).join(' ');

    parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
    parsed.command = process.env.comspec || 'cmd.exe';
    parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
  }

  return parsed;
}
// Normalize spawn-style (command, args?, options?) arguments into the
// descriptor cross-spawn works with. Inputs are cloned so callers' arrays
// and objects are never mutated; with options.shell set, node handles
// escaping itself and no further parsing is needed.
function parse$1(command, args, options) {
  // Normalize arguments, similar to nodejs
  if (args && !Array.isArray(args)) {
    options = args;
    args = null;
  }

  args = args ? args.slice(0) : []; // Clone array to avoid changing the original
  options = Object.assign({}, options); // Clone object to avoid changing the original

  // Build our parsed object
  const parsed = {
    command,
    args,
    options,
    file: undefined,
    original: {
      command,
      args,
    },
  };

  // Delegate further parsing to shell or non-shell
  return options.shell ? parsed : parseNonShell(parsed);
}

var parse_1 = parse$1;
const isWin$1 = process.platform === 'win32';

// Build the ENOENT error node itself would have produced for a missing
// command, based on the original (pre-escaping) command and args.
function notFoundError(original, syscall) {
  return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), {
    code: 'ENOENT',
    errno: 'ENOENT',
    syscall: `${syscall} ${original.command}`,
    path: original.command,
    spawnargs: original.args,
  });
}

// On Windows a missing command exits with code 1 instead of emitting an
// error; intercept the child's "exit" event and emit "error" with an
// ENOENT instead when that happens.
function hookChildProcess(cp, parsed) {
  if (!isWin$1) {
    return;
  }

  const originalEmit = cp.emit;

  cp.emit = function (name, arg1) {
    // If emitting "exit" event and exit code is 1, we need to check if
    // the command exists and emit an "error" instead
    // See https://github.com/IndigoUnited/node-cross-spawn/issues/16
    if (name === 'exit') {
      const err = verifyENOENT(arg1, parsed);

      if (err) {
        return originalEmit.call(cp, 'error', err);
      }
    }

    return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
  };
}

// Exit status 1 with no resolved file means the command was not found (async path).
function verifyENOENT(status, parsed) {
  if (isWin$1 && status === 1 && !parsed.file) {
    return notFoundError(parsed.original, 'spawn');
  }

  return null;
}

// Same check for the spawnSync path.
function verifyENOENTSync(status, parsed) {
  if (isWin$1 && status === 1 && !parsed.file) {
    return notFoundError(parsed.original, 'spawnSync');
  }

  return null;
}

var enoent$1 = {
  hookChildProcess,
  verifyENOENT,
  verifyENOENTSync,
  notFoundError,
};
const cp = require$$0$2;
const parse = parse_1;
const enoent = enoent$1;

// cross-spawn's spawn: parse/escape the command for the current platform,
// spawn it, and hook Windows ENOENT detection onto the child process.
function spawn(command, args, options) {
  // Parse the arguments
  const parsed = parse(command, args, options);

  // Spawn the child process
  const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);

  // Hook into child process "exit" event to emit an error if the command
  // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
  enoent.hookChildProcess(spawned, parsed);

  return spawned;
}

// Synchronous variant; ENOENT detection is folded into `result.error`.
function spawnSync(command, args, options) {
  // Parse the arguments
  const parsed = parse(command, args, options);

  // Spawn the child process
  const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);

  // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
  result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);

  return result;
}

crossSpawn$1.exports = spawn;
crossSpawn$1.exports.spawn = spawn;
crossSpawn$1.exports.sync = spawnSync;
crossSpawn$1.exports._parse = parse;
crossSpawn$1.exports._enoent = enoent;
var crossSpawnExports = crossSpawn$1.exports;
// Remove a single trailing newline (LF or CRLF) from a string or Buffer,
// leaving any earlier newlines intact.
var stripFinalNewline$1 = input => {
  const isString = typeof input === 'string';
  const LF = isString ? '\n' : '\n'.charCodeAt();
  const CR = isString ? '\r' : '\r'.charCodeAt();

  let result = input;
  if (result[result.length - 1] === LF) {
    result = result.slice(0, result.length - 1);
  }
  if (result[result.length - 1] === CR) {
    result = result.slice(0, result.length - 1);
  }

  return result;
};
var npmRunPath$1 = { exports: {} };
npmRunPath$1.exports;
(function (module) {
  const path = require$$0$1;
  const pathKey = pathKeyExports;

  // Build a PATH string that puts every `node_modules/.bin` directory from
  // `cwd` up to the filesystem root — plus the directory of the running
  // node binary — ahead of the existing PATH.
  const npmRunPath = options => {
    options = {
      cwd: process.cwd(),
      path: process.env[pathKey()],
      execPath: process.execPath,
      ...options
    };

    const result = [];
    let current = path.resolve(options.cwd);
    let previous;

    // Walk upwards until resolving '..' no longer changes the directory
    // (i.e. we reached the filesystem root).
    while (previous !== current) {
      result.push(path.join(current, 'node_modules/.bin'));
      previous = current;
      current = path.resolve(current, '..');
    }

    // Ensure the running `node` binary is used
    result.push(path.resolve(options.cwd, options.execPath, '..'));

    return result.concat(options.path).join(path.delimiter);
  };

  module.exports = npmRunPath;
  // TODO: Remove this for the next major release
  module.exports.default = npmRunPath;

  // Same computation, but returns a full env object with PATH replaced.
  module.exports.env = options => {
    options = {
      env: process.env,
      ...options
    };

    const env = { ...options.env };
    const key = pathKey({ env });
    options.path = env[key];
    env[key] = module.exports(options);

    return env;
  };
}(npmRunPath$1));
var npmRunPathExports = npmRunPath$1.exports;
var onetime$2 = { exports: {} };
var mimicFn$2 = { exports: {} };
// Copies every own property of `from` (including symbols and non-enumerables)
// onto `to`, preserving each property descriptor. Returns `to`.
const mimicFn$1 = (to, from) => {
  Reflect.ownKeys(from).forEach(property => {
    Object.defineProperty(to, property, Object.getOwnPropertyDescriptor(from, property));
  });
  return to;
};
mimicFn$2.exports = mimicFn$1;
// TODO: Remove this for the next major release
mimicFn$2.exports.default = mimicFn$1;
var mimicFnExports = mimicFn$2.exports;
const mimicFn = mimicFnExports;
// Tracks, per wrapper, how many times it has been invoked.
const calledFunctions = new WeakMap();
// Wraps `function_` so only the first call runs it; later calls return the
// cached result, or throw when `options.throw` is true.
const onetime$1 = (function_, options = {}) => {
  if (typeof function_ !== 'function') {
    throw new TypeError('Expected a function');
  }
  let returnValue;
  let callCount = 0;
  const functionName = function_.displayName || function_.name || '<anonymous>';
  const onetime = function (...arguments_) {
    calledFunctions.set(onetime, ++callCount);
    if (callCount > 1) {
      if (options.throw === true) {
        throw new Error(`Function \`${functionName}\` can only be called once`);
      }
      return returnValue;
    }
    returnValue = function_.apply(this, arguments_);
    function_ = null; // drop the reference so the wrapped function can be GC'd
    return returnValue;
  };
  mimicFn(onetime, function_);
  calledFunctions.set(onetime, callCount);
  return onetime;
};
onetime$2.exports = onetime$1;
// TODO: Remove this for the next major release
onetime$2.exports.default = onetime$1;
// Returns the number of times a wrapper produced by `onetime` was invoked.
onetime$2.exports.callCount = function_ => {
  if (!calledFunctions.has(function_)) {
    throw new Error(`The given function \`${function_.name}\` is not wrapped by the \`onetime\` package`);
  }
  return calledFunctions.get(function_);
};
var onetimeExports = onetime$2.exports;
var main = { } ;
var signals$2 = { } ;
var core = { } ;
// Bundled `human-signals` core table: every known signal with its default
// number, default action, description and originating standard. Several names
// share a number (e.g. SIGABRT/SIGIOT); platform constants disambiguate later.
Object . defineProperty ( core , "__esModule" , { value : true } ) ; core . SIGNALS = void 0 ;
const SIGNALS = [
{
name : "SIGHUP" ,
number : 1 ,
action : "terminate" ,
description : "Terminal closed" ,
standard : "posix" } ,
{
name : "SIGINT" ,
number : 2 ,
action : "terminate" ,
description : "User interruption with CTRL-C" ,
standard : "ansi" } ,
{
name : "SIGQUIT" ,
number : 3 ,
action : "core" ,
description : "User interruption with CTRL-\\" ,
standard : "posix" } ,
{
name : "SIGILL" ,
number : 4 ,
action : "core" ,
description : "Invalid machine instruction" ,
standard : "ansi" } ,
{
name : "SIGTRAP" ,
number : 5 ,
action : "core" ,
description : "Debugger breakpoint" ,
standard : "posix" } ,
{
name : "SIGABRT" ,
number : 6 ,
action : "core" ,
description : "Aborted" ,
standard : "ansi" } ,
{
name : "SIGIOT" ,
number : 6 ,
action : "core" ,
description : "Aborted" ,
standard : "bsd" } ,
{
name : "SIGBUS" ,
number : 7 ,
action : "core" ,
description :
"Bus error due to misaligned, non-existing address or paging error" ,
standard : "bsd" } ,
{
name : "SIGEMT" ,
number : 7 ,
action : "terminate" ,
description : "Command should be emulated but is not implemented" ,
standard : "other" } ,
{
name : "SIGFPE" ,
number : 8 ,
action : "core" ,
description : "Floating point arithmetic error" ,
standard : "ansi" } ,
{
name : "SIGKILL" ,
number : 9 ,
action : "terminate" ,
description : "Forced termination" ,
standard : "posix" ,
forced : true } ,
{
name : "SIGUSR1" ,
number : 10 ,
action : "terminate" ,
description : "Application-specific signal" ,
standard : "posix" } ,
{
name : "SIGSEGV" ,
number : 11 ,
action : "core" ,
description : "Segmentation fault" ,
standard : "ansi" } ,
{
name : "SIGUSR2" ,
number : 12 ,
action : "terminate" ,
description : "Application-specific signal" ,
standard : "posix" } ,
{
name : "SIGPIPE" ,
number : 13 ,
action : "terminate" ,
description : "Broken pipe or socket" ,
standard : "posix" } ,
{
name : "SIGALRM" ,
number : 14 ,
action : "terminate" ,
description : "Timeout or timer" ,
standard : "posix" } ,
{
name : "SIGTERM" ,
number : 15 ,
action : "terminate" ,
description : "Termination" ,
standard : "ansi" } ,
{
name : "SIGSTKFLT" ,
number : 16 ,
action : "terminate" ,
description : "Stack is empty or overflowed" ,
standard : "other" } ,
{
name : "SIGCHLD" ,
number : 17 ,
action : "ignore" ,
description : "Child process terminated, paused or unpaused" ,
standard : "posix" } ,
{
name : "SIGCLD" ,
number : 17 ,
action : "ignore" ,
description : "Child process terminated, paused or unpaused" ,
standard : "other" } ,
{
name : "SIGCONT" ,
number : 18 ,
action : "unpause" ,
description : "Unpaused" ,
standard : "posix" ,
forced : true } ,
{
name : "SIGSTOP" ,
number : 19 ,
action : "pause" ,
description : "Paused" ,
standard : "posix" ,
forced : true } ,
{
name : "SIGTSTP" ,
number : 20 ,
action : "pause" ,
description : "Paused using CTRL-Z or \"suspend\"" ,
standard : "posix" } ,
{
name : "SIGTTIN" ,
number : 21 ,
action : "pause" ,
description : "Background process cannot read terminal input" ,
standard : "posix" } ,
{
name : "SIGBREAK" ,
number : 21 ,
action : "terminate" ,
description : "User interruption with CTRL-BREAK" ,
standard : "other" } ,
{
name : "SIGTTOU" ,
number : 22 ,
action : "pause" ,
description : "Background process cannot write to terminal output" ,
standard : "posix" } ,
{
name : "SIGURG" ,
number : 23 ,
action : "ignore" ,
description : "Socket received out-of-band data" ,
standard : "bsd" } ,
{
name : "SIGXCPU" ,
number : 24 ,
action : "core" ,
description : "Process timed out" ,
standard : "bsd" } ,
{
name : "SIGXFSZ" ,
number : 25 ,
action : "core" ,
description : "File too big" ,
standard : "bsd" } ,
{
name : "SIGVTALRM" ,
number : 26 ,
action : "terminate" ,
description : "Timeout or timer" ,
standard : "bsd" } ,
{
name : "SIGPROF" ,
number : 27 ,
action : "terminate" ,
description : "Timeout or timer" ,
standard : "bsd" } ,
{
name : "SIGWINCH" ,
number : 28 ,
action : "ignore" ,
description : "Terminal window size changed" ,
standard : "bsd" } ,
{
name : "SIGIO" ,
number : 29 ,
action : "terminate" ,
description : "I/O is available" ,
standard : "other" } ,
{
name : "SIGPOLL" ,
number : 29 ,
action : "terminate" ,
description : "Watched event" ,
standard : "other" } ,
{
name : "SIGINFO" ,
number : 29 ,
action : "ignore" ,
description : "Request for process information" ,
standard : "other" } ,
{
name : "SIGPWR" ,
number : 30 ,
action : "terminate" ,
description : "Device running out of power" ,
standard : "systemv" } ,
{
name : "SIGSYS" ,
number : 31 ,
action : "core" ,
description : "Invalid system call" ,
standard : "other" } ,
{
name : "SIGUNUSED" ,
number : 31 ,
action : "terminate" ,
description : "Invalid system call" ,
standard : "other" } ] ; core . SIGNALS = SIGNALS ;
var realtime = {};
Object.defineProperty(realtime, "__esModule", { value: true }); realtime.SIGRTMAX = realtime.getRealtimeSignals = void 0;
// POSIX realtime signals span SIGRTMIN..SIGRTMAX; they have no standardized
// names, so synthetic names SIGRT1..SIGRT31 are generated.
const SIGRTMIN = 34;
const SIGRTMAX = 64; realtime.SIGRTMAX = SIGRTMAX;
// Builds the descriptor for the realtime signal at position `index`.
const getRealtimeSignal = function (value, index) {
  return {
    name: `SIGRT${index + 1}`,
    number: SIGRTMIN + index,
    action: "terminate",
    description: "Application-specific signal (realtime)",
    standard: "posix"
  };
};
// Returns descriptors for every realtime signal.
const getRealtimeSignals = function () {
  const length = SIGRTMAX - SIGRTMIN + 1;
  return Array.from({ length }, getRealtimeSignal);
}; realtime.getRealtimeSignals = getRealtimeSignals;
Object.defineProperty(signals$2, "__esModule", { value: true }); signals$2.getSignals = void 0; var _os$1 = require$$0$3;
var _core = core;
var _realtime$1 = realtime;
// Normalizes one signal description: the platform's `os.constants` number is
// preferred when the signal is supported, otherwise the default number is kept.
const normalizeSignal = function ({
  name,
  number: defaultNumber,
  description,
  action,
  forced = false,
  standard
}) {
  const constantSignal = _os$1.constants.signals[name];
  const supported = constantSignal !== undefined;
  const number = supported ? constantSignal : defaultNumber;
  return { name, number, description, supported, action, forced, standard };
};
// Returns the full, platform-normalized list: named signals plus realtime ones.
const getSignals = function () {
  const realtimeSignals = (0, _realtime$1.getRealtimeSignals)();
  return [..._core.SIGNALS, ...realtimeSignals].map(normalizeSignal);
}; signals$2.getSignals = getSignals;
Object.defineProperty(main, "__esModule", { value: true }); main.signalsByNumber = main.signalsByName = void 0; var _os = require$$0$3;
var _signals = signals$2;
var _realtime = realtime;
// Folds one signal into the `{ [name]: signal }` accumulator.
const getSignalByName = function (
  signalByNameMemo,
  { name, number, description, supported, action, forced, standard }
) {
  return {
    ...signalByNameMemo,
    [name]: { name, number, description, supported, action, forced, standard }
  };
};
// Map from signal name (e.g. "SIGTERM") to its normalized descriptor.
const getSignalsByName = function () {
  return (0, _signals.getSignals)().reduce(getSignalByName, {});
};
const signalsByName$1 = getSignalsByName(); main.signalsByName = signalsByName$1;
// Prefers the signal whose platform constant matches `number`; falls back to
// the first signal whose default number matches.
const findSignalByNumber = function (number, signals) {
  const matchingConstant = signals.find(({ name }) => _os.constants.signals[name] === number);
  if (matchingConstant !== undefined) {
    return matchingConstant;
  }
  return signals.find(signalA => signalA.number === number);
};
// Produces a single-entry `{ [number]: signal }` object, or `{}` when no
// signal uses that number.
const getSignalByNumber = function (number, signals) {
  const signal = findSignalByNumber(number, signals);
  if (signal === undefined) {
    return {};
  }
  const { name, description, supported, action, forced, standard } = signal;
  return {
    [number]: { name, number, description, supported, action, forced, standard }
  };
};
// Map from signal number to its normalized descriptor, covering 0..SIGRTMAX.
const getSignalsByNumber = function () {
  const signals = (0, _signals.getSignals)();
  const length = _realtime.SIGRTMAX + 1;
  const signalsA = Array.from({ length }, (value, number) => getSignalByNumber(number, signals));
  return Object.assign({}, ...signalsA);
};
const signalsByNumber = getSignalsByNumber(); main.signalsByNumber = signalsByNumber;
const { signalsByName } = main;
// Produces the human-readable failure reason used in execa error messages.
// Precedence: timeout > cancellation > spawn error code > signal > exit code.
const getErrorPrefix = ({ timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled }) => {
  if (timedOut) return `timed out after ${timeout} milliseconds`;
  if (isCanceled) return 'was canceled';
  if (errorCode !== undefined) return `failed with ${errorCode}`;
  if (signal !== undefined) return `was killed with ${signal} (${signalDescription})`;
  if (exitCode !== undefined) return `failed with exit code ${exitCode}`;
  return 'failed';
};
// Builds (or augments) the Error object execa rejects/returns on failure,
// attaching command, exit/signal info and captured output.
const makeError$1 = ({
  stdout,
  stderr,
  all,
  error,
  signal,
  exitCode,
  command,
  escapedCommand,
  timedOut,
  isCanceled,
  killed,
  parsed: { options: { timeout } }
}) => {
  // `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`.
  // We normalize them to `undefined`
  if (exitCode === null) {
    exitCode = undefined;
  }
  if (signal === null) {
    signal = undefined;
  }
  const signalDescription = signal === undefined ? undefined : signalsByName[signal].description;
  const errorCode = error && error.code;
  const prefix = getErrorPrefix({ timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled });
  const execaMessage = `Command ${prefix}: ${command}`;
  const isError = Object.prototype.toString.call(error) === '[object Error]';
  const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage;
  const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n');
  if (isError) {
    // Keep the original message accessible, then replace it with the full one.
    error.originalMessage = error.message;
    error.message = message;
  } else {
    error = new Error(message);
  }
  error.shortMessage = shortMessage;
  error.command = command;
  error.escapedCommand = escapedCommand;
  error.exitCode = exitCode;
  error.signal = signal;
  error.signalDescription = signalDescription;
  error.stdout = stdout;
  error.stderr = stderr;
  if (all !== undefined) {
    error.all = all;
  }
  // `bufferedData` was only a transport for partial output; drop it.
  if ('bufferedData' in error) {
    delete error.bufferedData;
  }
  error.failed = true;
  error.timedOut = Boolean(timedOut);
  error.isCanceled = isCanceled;
  error.killed = killed && !timedOut;
  return error;
};
var error = makeError$1;
var stdio = { exports: {} };
const aliases = ['stdin', 'stdout', 'stderr'];
// True when any of the stdin/stdout/stderr shorthand options is set.
const hasAlias = options => aliases.some(alias => options[alias] !== undefined);
// Normalizes the `stdio` option (or the `stdin`/`stdout`/`stderr` aliases)
// into the string or array form accepted by `child_process`.
const normalizeStdio$1 = options => {
  if (!options) {
    return;
  }
  const { stdio } = options;
  if (stdio === undefined) {
    // Build the array form from the individual aliases.
    return aliases.map(alias => options[alias]);
  }
  if (hasAlias(options)) {
    throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`);
  }
  if (typeof stdio === 'string') {
    return stdio;
  }
  if (!Array.isArray(stdio)) {
    throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
  }
  // Pad the array to at least three entries (stdin, stdout, stderr).
  const length = Math.max(stdio.length, aliases.length);
  return Array.from({ length }, (_, index) => stdio[index]);
};
stdio.exports = normalizeStdio$1;
// `ipc` is pushed unless it is already present
stdio.exports.node = options => {
  const stdio = normalizeStdio$1(options);
  if (stdio === 'ipc') {
    return 'ipc';
  }
  if (stdio === undefined || typeof stdio === 'string') {
    return [stdio, stdio, stdio, 'ipc'];
  }
  return stdio.includes('ipc') ? stdio : [...stdio, 'ipc'];
};
var stdioExports = stdio.exports;
var signalExit = { exports : { } } ;
var signals$1 = { exports : { } } ;
var hasRequiredSignals ;
function requireSignals ( ) {
if ( hasRequiredSignals ) return signals$1 . exports ;
hasRequiredSignals = 1 ;
( function ( module ) {
// This is not the set of all possible signals.
//
// It IS, however, the set of all signals that trigger
// an exit on either Linux or BSD systems. Linux is a
// superset of the signal names supported on BSD, and
// the unknown signals just fail to register, so we can
// catch that easily enough.
//
// Don't bother with SIGKILL. It's uncatchable, which
// means that we can't fire any callbacks anyway.
//
// If a user does happen to register a handler on a non-
// fatal signal like SIGWINCH or something, and then
// exit, it'll end up firing `process.emit('exit')`, so
// the handler will be fired anyway.
//
// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
// artificially, inherently leave the process in a
// state from which it is not safe to try and enter JS
// listeners.
module . exports = [
'SIGABRT' ,
'SIGALRM' ,
'SIGHUP' ,
'SIGINT' ,
'SIGTERM'
] ;
if ( process . platform !== 'win32' ) {
module . exports . push (
'SIGVTALRM' ,
'SIGXCPU' ,
'SIGXFSZ' ,
'SIGUSR2' ,
'SIGTRAP' ,
'SIGSYS' ,
'SIGQUIT' ,
'SIGIOT'
// should detect profiler and enable/disable accordingly.
// see #21
// 'SIGPROF'
) ;
}
if ( process . platform === 'linux' ) {
module . exports . push (
'SIGIO' ,
'SIGPOLL' ,
'SIGPWR' ,
'SIGSTKFLT' ,
'SIGUNUSED'
) ;
}
} ( signals$1 ) ) ;
return signals$1 . exports ;
}
// Note: since nyc uses this module to output coverage, any lines
// that are in the direct sync flow of nyc's outputCoverage are
// ignored, since we can never get coverage for them.
// grab a reference to node's real process object right away
var process$1 = commonjsGlobal . process ;
// Duck-types a usable Node `process`: every member below is required by the
// load/unload/emit machinery further down.
const processOk = function ( process ) {
return process &&
typeof process === 'object' &&
typeof process . removeListener === 'function' &&
typeof process . emit === 'function' &&
typeof process . reallyExit === 'function' &&
typeof process . listeners === 'function' &&
typeof process . kill === 'function' &&
typeof process . pid === 'number' &&
typeof process . on === 'function'
} ;
// some kind of non-node environment, just no-op
/* istanbul ignore if */
if ( ! processOk ( process$1 ) ) {
signalExit . exports = function ( ) {
return function ( ) { }
} ;
} else {
var assert = require$$0$4 ;
var signals = requireSignals ( ) ;
var isWin = /^win/i . test ( process$1 . platform ) ;
var EE = require$$2 ;
/* istanbul ignore if */
if ( typeof EE !== 'function' ) {
EE = EE . EventEmitter ;
}
// The emitter is stored on the global `process` object so multiple copies of
// this package loaded at once share a single set of exit handlers.
var emitter ;
if ( process$1 . __signal_exit_emitter__ ) {
emitter = process$1 . __signal_exit_emitter__ ;
} else {
emitter = process$1 . __signal_exit_emitter__ = new EE ( ) ;
emitter . count = 0 ;
emitter . emitted = { } ;
}
// Because this emitter is a global, we have to check to see if a
// previous version of this library failed to enable infinite listeners.
// I know what you're about to say. But literally everything about
// signal-exit is a compromise with evil. Get used to it.
if ( ! emitter . infinite ) {
emitter . setMaxListeners ( Infinity ) ;
emitter . infinite = true ;
}
// Registers `cb` to run on process exit (or after exit when `opts.alwaysLast`).
// Returns a function that removes the handler again.
signalExit . exports = function ( cb , opts ) {
/* istanbul ignore if */
if ( ! processOk ( commonjsGlobal . process ) ) {
return function ( ) { }
}
assert . equal ( typeof cb , 'function' , 'a callback must be provided for exit handler' ) ;
if ( loaded === false ) {
load ( ) ;
}
var ev = 'exit' ;
if ( opts && opts . alwaysLast ) {
ev = 'afterexit' ;
}
var remove = function ( ) {
emitter . removeListener ( ev , cb ) ;
// Once the last handler is gone, restore the patched process methods.
if ( emitter . listeners ( 'exit' ) . length === 0 &&
emitter . listeners ( 'afterexit' ) . length === 0 ) {
unload ( ) ;
}
} ;
emitter . on ( ev , cb ) ;
return remove
} ;
// Removes all signal listeners and restores the original `process.emit` and
// `process.reallyExit`.
var unload = function unload ( ) {
if ( ! loaded || ! processOk ( commonjsGlobal . process ) ) {
return
}
loaded = false ;
signals . forEach ( function ( sig ) {
try {
process$1 . removeListener ( sig , sigListeners [ sig ] ) ;
} catch ( er ) { }
} ) ;
process$1 . emit = originalProcessEmit ;
process$1 . reallyExit = originalProcessReallyExit ;
emitter . count -= 1 ;
} ;
signalExit . exports . unload = unload ;
// Emits 'exit'/'afterexit' on the shared emitter at most once per event name.
var emit = function emit ( event , code , signal ) {
/* istanbul ignore if */
if ( emitter . emitted [ event ] ) {
return
}
emitter . emitted [ event ] = true ;
emitter . emit ( event , code , signal ) ;
} ;
// { <signal>: <listener fn>, ... }
var sigListeners = { } ;
signals . forEach ( function ( sig ) {
sigListeners [ sig ] = function listener ( ) {
/* istanbul ignore if */
if ( ! processOk ( commonjsGlobal . process ) ) {
return
}
// If there are no other listeners, an exit is coming!
// Simplest way: remove us and then re-send the signal.
// We know that this will kill the process, so we can
// safely emit now.
var listeners = process$1 . listeners ( sig ) ;
if ( listeners . length === emitter . count ) {
unload ( ) ;
emit ( 'exit' , null , sig ) ;
/* istanbul ignore next */
emit ( 'afterexit' , null , sig ) ;
/* istanbul ignore next */
if ( isWin && sig === 'SIGHUP' ) {
// "SIGHUP" throws an `ENOSYS` error on Windows,
// so use a supported signal instead
sig = 'SIGINT' ;
}
/* istanbul ignore next */
process$1 . kill ( process$1 . pid , sig ) ;
}
} ;
} ) ;
signalExit . exports . signals = function ( ) {
return signals
} ;
var loaded = false ;
// Installs the signal listeners and patches `process.emit`/`process.reallyExit`
// so normal exits are also observed. Idempotent while loaded.
var load = function load ( ) {
if ( loaded || ! processOk ( commonjsGlobal . process ) ) {
return
}
loaded = true ;
// This is the number of onSignalExit's that are in play.
// It's important so that we can count the correct number of
// listeners on signals, and don't wait for the other one to
// handle it instead of us.
emitter . count += 1 ;
signals = signals . filter ( function ( sig ) {
try {
process$1 . on ( sig , sigListeners [ sig ] ) ;
return true
} catch ( er ) {
return false
}
} ) ;
process$1 . emit = processEmit ;
process$1 . reallyExit = processReallyExit ;
} ;
signalExit . exports . load = load ;
var originalProcessReallyExit = process$1 . reallyExit ;
// Wrapper for `process.reallyExit` that fires the exit handlers first.
var processReallyExit = function processReallyExit ( code ) {
/* istanbul ignore if */
if ( ! processOk ( commonjsGlobal . process ) ) {
return
}
process$1 . exitCode = code || /* istanbul ignore next */ 0 ;
emit ( 'exit' , process$1 . exitCode , null ) ;
/* istanbul ignore next */
emit ( 'afterexit' , process$1 . exitCode , null ) ;
/* istanbul ignore next */
originalProcessReallyExit . call ( process$1 , process$1 . exitCode ) ;
} ;
var originalProcessEmit = process$1 . emit ;
// Wrapper for `process.emit` that intercepts the 'exit' event to run the
// registered handlers before delegating to the original emitter.
var processEmit = function processEmit ( ev , arg ) {
if ( ev === 'exit' && processOk ( commonjsGlobal . process ) ) {
/* istanbul ignore else */
if ( arg !== undefined ) {
process$1 . exitCode = arg ;
}
var ret = originalProcessEmit . apply ( this , arguments ) ;
/* istanbul ignore next */
emit ( 'exit' , process$1 . exitCode , null ) ;
/* istanbul ignore next */
emit ( 'afterexit' , process$1 . exitCode , null ) ;
/* istanbul ignore next */
return ret
} else {
return originalProcessEmit . apply ( this , arguments )
}
} ;
}
var signalExitExports = signalExit . exports ;
const os = require$$0$3;
const onExit = signalExitExports;
const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5;
// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior
const spawnedKill$1 = (kill, signal = 'SIGTERM', options = {}) => {
  const killResult = kill(signal);
  setKillTimeout(kill, signal, options, killResult);
  return killResult;
};
// Schedules a follow-up SIGKILL when a SIGTERM was delivered successfully.
const setKillTimeout = (kill, signal, options, killResult) => {
  if (!shouldForceKill(signal, options, killResult)) {
    return;
  }
  const timeout = getForceKillAfterTimeout(options);
  const t = setTimeout(() => {
    kill('SIGKILL');
  }, timeout);
  // Guarded because there's no `.unref()` when `execa` is used in the renderer
  // process in Electron. This cannot be tested since we don't run tests in
  // Electron.
  // istanbul ignore else
  if (t.unref) {
    t.unref();
  }
};
const shouldForceKill = (signal, { forceKillAfterTimeout }, killResult) =>
  isSigterm(signal) && forceKillAfterTimeout !== false && killResult;
// Accepts both the numeric constant and any capitalization of 'SIGTERM'.
const isSigterm = signal =>
  signal === os.constants.signals.SIGTERM ||
  (typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM');
// Resolves the `forceKillAfterTimeout` option to a millisecond delay.
const getForceKillAfterTimeout = ({ forceKillAfterTimeout = true }) => {
  if (forceKillAfterTimeout === true) {
    return DEFAULT_FORCE_KILL_TIMEOUT;
  }
  if (!Number.isFinite(forceKillAfterTimeout) || forceKillAfterTimeout < 0) {
    throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`);
  }
  return forceKillAfterTimeout;
};
// `childProcess.cancel()`
const spawnedCancel$1 = (spawned, context) => {
  if (spawned.kill()) {
    context.isCanceled = true;
  }
};
// Kills the child and rejects with a `timedOut`-flagged error.
const timeoutKill = (spawned, signal, reject) => {
  spawned.kill(signal);
  reject(Object.assign(new Error('Timed out'), { timedOut: true, signal }));
};
// `timeout` option handling
const setupTimeout$1 = (spawned, { timeout, killSignal = 'SIGTERM' }, spawnedPromise) => {
  if (timeout === 0 || timeout === undefined) {
    return spawnedPromise;
  }
  let timeoutId;
  const timeoutPromise = new Promise((resolve, reject) => {
    timeoutId = setTimeout(() => {
      timeoutKill(spawned, killSignal, reject);
    }, timeout);
  });
  // Clear the timer as soon as the process settles, win or lose.
  const safeSpawnedPromise = spawnedPromise.finally(() => {
    clearTimeout(timeoutId);
  });
  return Promise.race([timeoutPromise, safeSpawnedPromise]);
};
const validateTimeout$1 = ({ timeout }) => {
  if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) {
    throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`);
  }
};
// `cleanup` option handling
const setExitHandler$1 = async (spawned, { cleanup, detached }, timedPromise) => {
  if (!cleanup || detached) {
    return timedPromise;
  }
  const removeExitHandler = onExit(() => {
    spawned.kill();
  });
  return timedPromise.finally(() => {
    removeExitHandler();
  });
};
var kill = {
  spawnedKill: spawnedKill$1,
  spawnedCancel: spawnedCancel$1,
  setupTimeout: setupTimeout$1,
  validateTimeout: validateTimeout$1,
  setExitHandler: setExitHandler$1
};
// Duck-type checks for Node streams (bundled `is-stream` package): a stream is
// any non-null object with a `pipe` function; the flavor checks add the
// internal state markers Node streams carry.
function isStream$1(stream) {
  return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function';
}
isStream$1.writable = function (stream) {
  return isStream$1(stream) &&
    stream.writable !== false &&
    typeof stream._write === 'function' &&
    typeof stream._writableState === 'object';
};
isStream$1.readable = function (stream) {
  return isStream$1(stream) &&
    stream.readable !== false &&
    typeof stream._read === 'function' &&
    typeof stream._readableState === 'object';
};
isStream$1.duplex = function (stream) {
  return isStream$1.writable(stream) && isStream$1.readable(stream);
};
isStream$1.transform = function (stream) {
  return isStream$1.duplex(stream) && typeof stream._transform === 'function';
};
var isStream_1 = isStream$1;
var getStream$2 = { exports: {} };
const { PassThrough: PassThroughStream } = require$$0$5;
// Creates a PassThrough stream that records everything written to it, exposing
// `getBufferedValue()` (string, Buffer or array of chunks depending on the
// options) and `getBufferedLength()`.
var bufferStream$1 = options => {
  const { array, encoding: requestedEncoding } = { ...options };
  const isBuffer = requestedEncoding === 'buffer';
  let encoding = requestedEncoding;
  // In array mode, raw object chunks are collected unless an encoding is forced.
  let objectMode = false;
  if (array) {
    objectMode = !(encoding || isBuffer);
  } else {
    encoding = encoding || 'utf8';
  }
  if (isBuffer) {
    encoding = null;
  }
  const stream = new PassThroughStream({ objectMode });
  if (encoding) {
    stream.setEncoding(encoding);
  }
  let length = 0;
  const chunks = [];
  stream.on('data', chunk => {
    chunks.push(chunk);
    // In object mode "length" counts chunks; otherwise it counts bytes/chars.
    length = objectMode ? chunks.length : length + chunk.length;
  });
  stream.getBufferedValue = () => {
    if (array) {
      return chunks;
    }
    return isBuffer ? Buffer.concat(chunks, length) : chunks.join('');
  };
  stream.getBufferedLength = () => length;
  return stream;
};
const { constants : BufferConstants } = require$$0$6 ;

const stream$1 = require$$0$5 ;

const { promisify } = require$$2$1 ;
const bufferStream = bufferStream$1 ;

// Promisified `stream.pipeline`, used to pump the input stream into the buffer.
const streamPipelinePromisified = promisify ( stream$1 . pipeline ) ;

// Error thrown when a stream produces more than `maxBuffer` bytes/chars.
class MaxBufferError extends Error {
constructor ( ) {
super ( 'maxBuffer exceeded' ) ;
this . name = 'MaxBufferError' ;
}
}
// Buffers the whole of `inputStream` and resolves with its contents (string,
// Buffer or chunk array depending on `options`). Rejects with `MaxBufferError`
// — carrying the data buffered so far in `bufferedData` — once `maxBuffer`
// is exceeded, or with the pipeline error on stream failure.
async function getStream$1(inputStream, options) {
  if (!inputStream) {
    throw new Error('Expected a stream');
  }
  const mergedOptions = { maxBuffer: Infinity, ...options };
  const { maxBuffer } = mergedOptions;
  const stream = bufferStream(mergedOptions);
  await new Promise((resolve, reject) => {
    const rejectPromise = error => {
      // Don't retrieve an oversized buffer.
      if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) {
        error.bufferedData = stream.getBufferedValue();
      }
      reject(error);
    };
    // Pump the input into the buffering stream; failures carry partial data.
    streamPipelinePromisified(inputStream, stream).then(resolve, rejectPromise);
    stream.on('data', () => {
      if (stream.getBufferedLength() > maxBuffer) {
        rejectPromise(new MaxBufferError());
      }
    });
  });
  return stream.getBufferedValue();
}
getStream$2.exports = getStream$1;
getStream$2.exports.buffer = (stream, options) => getStream$1(stream, { ...options, encoding: 'buffer' });
getStream$2.exports.array = (stream, options) => getStream$1(stream, { ...options, array: true });
getStream$2.exports.MaxBufferError = MaxBufferError;
var getStreamExports = getStream$2.exports;
const { PassThrough } = require$$0$5 ;
// Bundled `merge-stream`: merges any number of readable streams into a single
// object-mode PassThrough, ending the output only after every source has ended.
var mergeStream$1 = function ( /*streams...*/ ) {
var sources = [ ] ;
var output = new PassThrough ( { objectMode : true } ) ;
output . setMaxListeners ( 0 ) ;
output . add = add ;
output . isEmpty = isEmpty ;
// Keep `sources` in sync when a source is manually unpiped from the output.
output . on ( 'unpipe' , remove ) ;
Array . prototype . slice . call ( arguments ) . forEach ( add ) ;
return output
// The function declarations below are hoisted, so they are usable above.
function add ( source ) {
if ( Array . isArray ( source ) ) {
source . forEach ( add ) ;
return this
}
sources . push ( source ) ;
source . once ( 'end' , remove . bind ( null , source ) ) ;
// Forward source errors to the merged output.
source . once ( 'error' , output . emit . bind ( output , 'error' ) ) ;
// `end: false` so one source ending does not end the merged output.
source . pipe ( output , { end : false } ) ;
return this
}
function isEmpty ( ) {
return sources . length == 0 ;
}
function remove ( source ) {
sources = sources . filter ( function ( it ) { return it !== source } ) ;
// End the output once the last source is gone (unless already unreadable).
if ( ! sources . length && output . readable ) { output . end ( ) ; }
}
} ;
const isStream = isStream _1 ;
const getStream = getStreamExports ;
const mergeStream = mergeStream$1 ;
// `input` option
const handleInput$1 = ( spawned , input ) => {
// Checking for stdin is workaround for https://github.com/nodejs/node/issues/26852
// @todo remove `|| spawned.stdin === undefined` once we drop support for Node.js <=12.2.0
if ( input === undefined || spawned . stdin === undefined ) {
return ;
}
if ( isStream ( input ) ) {
input . pipe ( spawned . stdin ) ;
} else {
spawned . stdin . end ( input ) ;
}
} ;
// `all` interleaves `stdout` and `stderr`
// Returns a merged stream of whichever of the two exists, or `undefined` when
// the `all` option is off or neither pipe is available.
const makeAllStream$1 = (spawned, { all }) => {
  if (!all || (!spawned.stdout && !spawned.stderr)) {
    return;
  }
  const mixed = mergeStream();
  for (const source of [spawned.stdout, spawned.stderr]) {
    if (source) {
      mixed.add(source);
    }
  }
  return mixed;
};
// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
const getBufferedData = async ( stream , streamPromise ) => {
if ( ! stream ) {
return ;
}
stream . destroy ( ) ;
try {
return await streamPromise ;
} catch ( error ) {
return error . bufferedData ;
}
} ;
// Starts buffering a child stream according to `encoding`/`maxBuffer`.
// Returns `undefined` when the stream is absent or buffering is disabled.
const getStreamPromise = (stream, { encoding, buffer, maxBuffer }) => {
  if (!stream || !buffer) {
    return;
  }
  return encoding
    ? getStream(stream, { encoding, maxBuffer })
    : getStream.buffer(stream, { maxBuffer });
};
// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)
const getSpawnedResult$1 = async ( { stdout , stderr , all } , { encoding , buffer , maxBuffer } , processDone ) => {
const stdoutPromise = getStreamPromise ( stdout , { encoding , buffer , maxBuffer } ) ;
const stderrPromise = getStreamPromise ( stderr , { encoding , buffer , maxBuffer } ) ;
const allPromise = getStreamPromise ( all , { encoding , buffer , maxBuffer : maxBuffer * 2 } ) ;
try {
return await Promise . all ( [ processDone , stdoutPromise , stderrPromise , allPromise ] ) ;
} catch ( error ) {
return Promise . all ( [
{ error , signal : error . signal , timedOut : error . timedOut } ,
getBufferedData ( stdout , stdoutPromise ) ,
getBufferedData ( stderr , stderrPromise ) ,
getBufferedData ( all , allPromise )
] ) ;
}
} ;
// Sync mode cannot consume a stream incrementally, so a stream `input` option
// is rejected up front.
const validateInputSync$1 = ( { input } ) => {
if ( isStream ( input ) ) {
throw new TypeError ( 'The `input` option cannot be a stream in sync mode' ) ;
}
} ;

// Aggregated stream helpers used by the execa entry points.
var stream = {

handleInput : handleInput$1 ,
makeAllStream : makeAllStream$1 ,
getSpawnedResult : getSpawnedResult$1 ,
validateInputSync : validateInputSync$1
} ;
const nativePromisePrototype = (async () => {})().constructor.prototype;
// Descriptors of the native Promise methods to graft onto the child process.
const descriptors = ['then', 'catch', 'finally'].map(property => [
  property,
  Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property)
]);
// The return value is a mixin of `childProcess` and `Promise`
const mergePromise$1 = (spawned, promise) => {
  descriptors.forEach(([property, descriptor]) => {
    // Starting the main `promise` is deferred to avoid consuming streams
    const value = typeof promise === 'function'
      ? (...args) => Reflect.apply(descriptor.value, promise(), args)
      : descriptor.value.bind(promise);
    Reflect.defineProperty(spawned, property, { ...descriptor, value });
  });
  return spawned;
};
// Use promises instead of `child_process` events
// Resolves with `{ exitCode, signal }` on exit; rejects on process or stdin errors.
const getSpawnedPromise$1 = spawned => new Promise((resolve, reject) => {
  spawned.on('exit', (exitCode, signal) => {
    resolve({ exitCode, signal });
  });
  spawned.on('error', error => {
    reject(error);
  });
  if (spawned.stdin) {
    spawned.stdin.on('error', error => {
      reject(error);
    });
  }
});
var promise = {
  mergePromise: mergePromise$1,
  getSpawnedPromise: getSpawnedPromise$1
};
// Build the `[file, ...args]` argv list; a non-array `args` value is ignored.
const normalizeArgs = (file, args = []) =>
    Array.isArray(args) ? [file, ...args] : [file];
// Arguments made only of word chars, dots and dashes never need quoting.
const NO_ESCAPE_REGEXP = /^[\w.-]+$/;
const DOUBLE_QUOTES_REGEXP = /"/g;
// Wrap an argument in double quotes (escaping embedded quotes) unless it is
// a plain word; non-string values pass through untouched.
const escapeArg = (arg) => {
    if (typeof arg !== 'string' || NO_ESCAPE_REGEXP.test(arg)) {
        return arg;
    }
    const escaped = arg.replace(DOUBLE_QUOTES_REGEXP, '\\"');
    return `"${escaped}"`;
};
const joinCommand$1 = ( file , args ) => {
return normalizeArgs ( file , args ) . join ( ' ' ) ;
} ;
const getEscapedCommand$1 = ( file , args ) => {
return normalizeArgs ( file , args ) . map ( arg => escapeArg ( arg ) ) . join ( ' ' ) ;
} ;
const SPACES_REGEXP = / +/g;
// Handle `execa.command()`: split a command line on runs of spaces, while
// letting a backslash escape a space (`foo\ bar` stays a single token).
const parseCommand$1 = (command) => {
    const tokens = [];
    for (const token of command.trim().split(SPACES_REGEXP)) {
        // Allow spaces to be escaped by a backslash if not meant as a delimiter
        const previousToken = tokens[tokens.length - 1];
        if (previousToken && previousToken.endsWith('\\')) {
            // Drop the trailing backslash and merge with the current token
            const merged = `${previousToken.slice(0, -1)} ${token}`;
            tokens[tokens.length - 1] = merged;
        } else {
            tokens.push(token);
        }
    }
    return tokens;
};
var command = {
joinCommand : joinCommand$1 ,
getEscapedCommand : getEscapedCommand$1 ,
parseCommand : parseCommand$1
} ;
// CommonJS aliases wiring the bundled execa implementation to its (also
// bundled) dependencies. Fix: two stray git-timestamp lines were embedded
// between the destructuring statements, corrupting the syntax.
const path = require$$0$1;
const childProcess = require$$0$2;
const crossSpawn = crossSpawnExports;
const stripFinalNewline = stripFinalNewline$1;
const npmRunPath = npmRunPathExports;
const onetime = onetimeExports;
const makeError = error;
const normalizeStdio = stdioExports;
const { spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler } = kill;
const { handleInput, getSpawnedResult, makeAllStream, validateInputSync } = stream;
const { mergePromise, getSpawnedPromise } = promise;
const { joinCommand, parseCommand, getEscapedCommand } = command;
// execa's default maxBuffer: 100 MB.
const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100;
// Compute the child's environment: optionally extend process.env with the
// user's `env` option, and optionally prepend local node_modules/.bin to
// PATH via npm-run-path when `preferLocal` is set.
const getEnv = ({ env: envOption, extendEnv, preferLocal, localDir, execPath }) => {
    const env = extendEnv ? { ...process.env, ...envOption } : envOption;
    return preferLocal
        ? npmRunPath.env({ env, cwd: localDir, execPath })
        : env;
};
// Merge user arguments with cross-spawn's parsed command and execa's defaults.
// Returns { file, args, options, parsed } ready for child_process.spawn().
const handleArguments = (file, args, options = {}) => {
    const parsed = crossSpawn._parse(file, args, options);
    file = parsed.command;
    args = parsed.args;
    options = parsed.options;
    // Apply execa defaults; user-supplied options win via the trailing spread.
    options = {
        maxBuffer: DEFAULT_MAX_BUFFER,
        buffer: true,
        stripFinalNewline: true,
        extendEnv: true,
        preferLocal: false,
        localDir: options.cwd || process.cwd(),
        execPath: process.execPath,
        encoding: 'utf8',
        reject: true,
        cleanup: true,
        all: false,
        windowsHide: true,
        ...options
    };
    options.env = getEnv(options);
    options.stdio = normalizeStdio(options);
    if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') {
        // #116: `/q` suppresses cmd.exe command echoing
        args.unshift('/q');
    }
    return { file, args, options, parsed };
};
// Normalize a stdout/stderr value: non-string/non-Buffer values become
// `undefined` (or '' when an error occurred, to mimic the async API);
// otherwise optionally strip the trailing newline.
const handleOutput = (options, value, error) => {
    const isTextual = typeof value === 'string' || Buffer.isBuffer(value);
    if (!isTextual) {
        // When `execa.sync()` errors, we normalize it to '' to mimic `execa()`
        return error === undefined ? undefined : '';
    }
    return options.stripFinalNewline ? stripFinalNewline(value) : value;
};
const execa = ( file , args , options ) => {
const parsed = handleArguments ( file , args , options ) ;
const command = joinCommand ( file , args ) ;
const escapedCommand = getEscapedCommand ( file , args ) ;
validateTimeout ( parsed . options ) ;
let spawned ;
try {
spawned = childProcess . spawn ( parsed . file , parsed . args , parsed . options ) ;
} catch ( error ) {
// Ensure the returned error is always both a promise and a child process
const dummySpawned = new childProcess . ChildProcess ( ) ;
const errorPromise = Promise . reject ( makeError ( {
error ,
stdout : '' ,
stderr : '' ,
all : '' ,
command ,
escapedCommand ,
parsed ,
timedOut : false ,
isCanceled : false ,
killed : false
} ) ) ;
return mergePromise ( dummySpawned , errorPromise ) ;
}
const spawnedPromise = getSpawnedPromise ( spawned ) ;
const timedPromise = setupTimeout ( spawned , parsed . options , spawnedPromise ) ;
const processDone = setExitHandler ( spawned , parsed . options , timedPromise ) ;
const context = { isCanceled : false } ;
spawned . kill = spawnedKill . bind ( null , spawned . kill . bind ( spawned ) ) ;
spawned . cancel = spawnedCancel . bind ( null , spawned , context ) ;
const handlePromise = async ( ) => {
const [ { error , exitCode , signal , timedOut } , stdoutResult , stderrResult , allResult ] = await getSpawnedResult ( spawned , parsed . options , processDone ) ;
const stdout = handleOutput ( parsed . options , stdoutResult ) ;
const stderr = handleOutput ( parsed . options , stderrResult ) ;
const all = handleOutput ( parsed . options , allResult ) ;
if ( error || exitCode !== 0 || signal !== null ) {
const returnedError = makeError ( {
error ,
exitCode ,
signal ,
stdout ,
stderr ,
all ,
command ,
escapedCommand ,
parsed ,
timedOut ,
isCanceled : context . isCanceled ,
killed : spawned . killed
} ) ;
if ( ! parsed . options . reject ) {
return returnedError ;
}
throw returnedError ;
}
return {
command ,
escapedCommand ,
exitCode : 0 ,
stdout ,
stderr ,
all ,
failed : false ,
timedOut : false ,
isCanceled : false ,
killed : false
} ;
} ;
const handlePromiseOnce = onetime ( handlePromise ) ;
handleInput ( spawned , parsed . options . input ) ;
spawned . all = makeAllStream ( spawned , parsed . options ) ;
return mergePromise ( spawned , handlePromiseOnce ) ;
} ;
execa$2 . exports = execa ;
execa$2 . exports . sync = ( file , args , options ) => {
const parsed = handleArguments ( file , args , options ) ;
const command = joinCommand ( file , args ) ;
const escapedCommand = getEscapedCommand ( file , args ) ;
validateInputSync ( parsed . options ) ;
let result ;
try {
result = childProcess . spawnSync ( parsed . file , parsed . args , parsed . options ) ;
} catch ( error ) {
throw makeError ( {
error ,
stdout : '' ,
stderr : '' ,
all : '' ,
command ,
escapedCommand ,
parsed ,
timedOut : false ,
isCanceled : false ,
killed : false
} ) ;
}
const stdout = handleOutput ( parsed . options , result . stdout , result . error ) ;
const stderr = handleOutput ( parsed . options , result . stderr , result . error ) ;
if ( result . error || result . status !== 0 || result . signal !== null ) {
const error = makeError ( {
stdout ,
stderr ,
error : result . error ,
signal : result . signal ,
exitCode : result . status ,
command ,
escapedCommand ,
parsed ,
timedOut : result . error && result . error . code === 'ETIMEDOUT' ,
isCanceled : false ,
killed : result . signal !== null
} ) ;
if ( ! parsed . options . reject ) {
return error ;
}
throw error ;
}
return {
command ,
escapedCommand ,
exitCode : 0 ,
stdout ,
stderr ,
failed : false ,
timedOut : false ,
isCanceled : false ,
killed : false
} ;
} ;
execa$2 . exports . command = ( command , options ) => {
const [ file , ... args ] = parseCommand ( command ) ;
return execa ( file , args , options ) ;
} ;
execa$2 . exports . commandSync = ( command , options ) => {
const [ file , ... args ] = parseCommand ( command ) ;
return execa . sync ( file , args , options ) ;
} ;
execa$2 . exports . node = ( scriptPath , args , options = { } ) => {
if ( args && ! Array . isArray ( args ) && typeof args === 'object' ) {
options = args ;
args = [ ] ;
}
const stdio = normalizeStdio . node ( options ) ;
const defaultExecArgv = process . execArgv . filter ( arg => ! arg . startsWith ( '--inspect' ) ) ;
const {
nodePath = process . execPath ,
nodeOptions = defaultExecArgv
} = options ;
return execa (
nodePath ,
[
... nodeOptions ,
scriptPath ,
... ( Array . isArray ( args ) ? args : [ ] )
] ,
{
... options ,
stdin : undefined ,
stdout : undefined ,
stderr : undefined ,
stdio ,
shell : false
}
) ;
} ;
var execaExports = execa$2 . exports ;
var execa$1 = /*@__PURE__*/ getDefaultExportFromCjs ( execaExports ) ;
// Build a RegExp matching ANSI escape sequences (CSI codes plus OSC
// hyperlink/title sequences). Fix: several stray git-timestamp lines were
// embedded inside both function bodies, corrupting the syntax.
function ansiRegex({ onlyFirst = false } = {}) {
    // Valid string terminator sequences are BEL, ESC\, and 0x9c
    const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)';
    const pattern = [
        `[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`,
        '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))',
    ].join('|');
    return new RegExp(pattern, onlyFirst ? undefined : 'g');
}
// Shared global-flag instance; safe to reuse because `.replace()` resets lastIndex.
const regex = ansiRegex();
// Remove all ANSI escape sequences from `string`; throws on non-strings.
function stripAnsi(string) {
    if (typeof string !== 'string') {
        throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
    }
    // Even though the regex is global, we don't need to reset the `.lastIndex`
    // because unlike `.exec()` and `.test()`, `.replace()` does it automatically
    // and doing it manually has a performance penalty.
    return string.replace(regex, '');
}
// Determine the user's default shell: COMSPEC on Windows,
// os.userInfo().shell when available, otherwise $SHELL with a
// platform-appropriate fallback (/bin/zsh on macOS, /bin/sh elsewhere).
const detectDefaultShell = () => {
    const { env } = process$2;
    if (process$2.platform === 'win32') {
        return env.COMSPEC || 'cmd.exe';
    }
    try {
        const { shell } = node_os.userInfo();
        if (shell) {
            return shell;
        }
    } catch { }
    const fallback = process$2.platform === 'darwin' ? '/bin/zsh' : '/bin/sh';
    return env.SHELL || fallback;
};
// Stores default shell when imported.
const defaultShell = detectDefaultShell();
// Shell arguments: run an interactive login shell (-ilc) and print `env`
// between sentinel delimiters so it can be separated from login banners.
const args = [
    '-ilc',
    'echo -n "_SHELL_ENV_DELIMITER_"; env; echo -n "_SHELL_ENV_DELIMITER_"; exit',
];
const env = {
    // Disables Oh My Zsh auto-update thing that can block the process.
    DISABLE_AUTO_UPDATE: 'true',
};
// Parse the `env` output captured between the two _SHELL_ENV_DELIMITER_
// sentinels into a plain { NAME: value } object. A value may itself
// contain '=' characters, so everything after the first '=' is the value.
const parseEnv = (env) => {
    env = env.split('_SHELL_ENV_DELIMITER_')[1];
    const returnValue = {};
    const lines = stripAnsi(env).split('\n').filter(Boolean);
    for (const line of lines) {
        const [key, ...values] = line.split('=');
        returnValue[key] = values.join('=');
    }
    return returnValue;
};
function shellEnvSync ( shell ) {
if ( process$2 . platform === 'win32' ) {
return process$2 . env ;
}
try {
const { stdout } = execa$1 . sync ( shell || defaultShell , args , { env } ) ;
return parseEnv ( stdout ) ;
} catch ( error ) {
if ( shell ) {
throw error ;
} else {
return process$2 . env ;
}
}
}
function shellPathSync ( ) {
const { PATH } = shellEnvSync ( ) ;
return PATH ;
}
function fixPath ( ) {
if ( process$2 . platform === 'win32' ) {
return ;
}
process$2 . env . PATH = shellPathSync ( ) || [
'./node_modules/.bin' ,
'/.nodebrew/current/bin' ,
'/usr/local/bin' ,
process$2 . env . PATH ,
] . join ( ':' ) ;
}
2026-05-06 17:32:44 +08:00
// File extensions treated as images by the plugin.
// Fix: stray git-timestamp lines were embedded inside both function bodies,
// corrupting the syntax.
const IMAGE_EXT_LIST = [
    ".png",
    ".jpg",
    ".jpeg",
    ".bmp",
    ".gif",
    ".svg",
    ".tiff",
    ".webp",
    ".avif",
];
// Case-insensitive check of an extension (including the leading dot).
function isAnImage(ext) {
    return IMAGE_EXT_LIST.includes(ext.toLowerCase());
}
// Check whether a file path points to an image, judging by its extension.
function isAssetTypeAnImage(path) {
    return isAnImage(pathBrowserify.extname(path));
}
2026-05-06 17:32:44 +08:00
// Best-effort OS detection from the browser-style `navigator.appVersion`
// (available in Obsidian's Electron renderer).
// Fix: a stray git-timestamp line was embedded before the closing brace.
function getOS() {
    const { appVersion } = navigator;
    if (appVersion.indexOf("Win") !== -1) {
        return "Windows";
    }
    else if (appVersion.indexOf("Mac") !== -1) {
        return "MacOS";
    }
    else if (appVersion.indexOf("X11") !== -1) {
        return "Linux";
    }
    else {
        return "Unknown OS";
    }
}
2026-05-06 17:32:44 +08:00
// Drain an async-iterable stream and decode the concatenated chunks as UTF-8.
// Fix: a stray git-timestamp line was embedded before the closing brace.
async function streamToString(stream) {
    const chunks = [];
    for await (const chunk of stream) {
        chunks.push(Buffer.from(chunk));
    }
    return Buffer.concat(chunks).toString("utf-8");
}
2026-05-06 17:32:44 +08:00
// Extract the asset filename from a URL: the segment after the last '/',
// with any query string ('?...') and fragment ('#...') stripped.
// Fix: a stray git-timestamp line was embedded before the closing brace.
function getUrlAsset(url) {
    return (url = url.substr(1 + url.lastIndexOf("/")).split("?")[0]).split("#")[0];
}
2026-05-06 17:32:44 +08:00
// Return the LAST http(s) URL in `list`, or undefined when there is none.
// Fixes: (1) a stray git-timestamp line corrupted the body; (2) the old
// `list.reverse().forEach(...)` could not break early, so the final
// assignment was actually the FIRST http item of the original order —
// contradicting the function's name and its own reverse() intent; (3)
// `reverse()` mutated the caller's array. Scanning backwards fixes all three.
function getLastImage(list) {
    for (let i = list.length - 1; i >= 0; i--) {
        const item = list[i];
        if (item && item.startsWith("http")) {
            return item;
        }
    }
    return undefined;
}
2026-05-06 17:32:44 +08:00
// Index an array of objects by the given key:
// [{id:'a'},...] -> { a: {id:'a'}, ... }. Later duplicates overwrite earlier.
// Fix: a stray git-timestamp line was embedded before the closing brace.
function arrayToObject(arr, key) {
    const obj = {};
    arr.forEach(element => {
        obj[element[key]] = element;
    });
    return obj;
}
2026-05-06 17:32:44 +08:00
// Copy a Node Buffer (or any array-like of bytes) into a standalone,
// newly-allocated ArrayBuffer.
function bufferToArrayBuffer(buffer) {
    const arrayBuffer = new ArrayBuffer(buffer.length);
    const view = new Uint8Array(arrayBuffer);
    view.set(buffer);
    return arrayBuffer;
}
// Short pseudo-random id. NOTE(review): despite the name this is NOT an
// RFC 4122 UUID and is not cryptographically secure (Math.random-based).
// Fix: a stray git-timestamp line was embedded before the closing brace.
function uuid() {
    return Math.random().toString(36).slice(2);
}
// Primitive types
// DataView over the byte range of the given typed array.
function dv(array) {
    return new DataView(array.buffer, array.byteOffset);
}
/**
 * Build a fixed-width integer token: `len` bytes, read with `getter` and
 * written with `setter` on a DataView, optionally little-endian.
 * (The endianness argument is simply ignored by the 8-bit accessors.)
 */
const makeIntToken = (len, getter, setter, littleEndian) => ({
    len,
    get(array, offset) {
        return dv(array)[getter](offset, littleEndian);
    },
    put(array, offset, value) {
        dv(array)[setter](offset, value, littleEndian);
        return offset + len;
    }
});
/**
 * 8-bit unsigned integer
 */
const UINT8 = makeIntToken(1, 'getUint8', 'setUint8', undefined);
/**
 * 16-bit unsigned integer, Little Endian byte order
 */
const UINT16_LE = makeIntToken(2, 'getUint16', 'setUint16', true);
/**
 * 16-bit unsigned integer, Big Endian byte order
 */
const UINT16_BE = makeIntToken(2, 'getUint16', 'setUint16', undefined);
/**
 * 32-bit unsigned integer, Little Endian byte order
 */
const UINT32_LE = makeIntToken(4, 'getUint32', 'setUint32', true);
/**
 * 32-bit unsigned integer, Big Endian byte order
 */
const UINT32_BE = makeIntToken(4, 'getUint32', 'setUint32', undefined);
/**
 * 32-bit signed integer, Big Endian byte order
 */
const INT32_BE = makeIntToken(4, 'getInt32', 'setInt32', undefined);
/**
 * 64-bit unsigned integer, Little Endian byte order (BigInt values)
 */
const UINT64_LE = makeIntToken(8, 'getBigUint64', 'setBigUint64', true);
/**
 * Token that consumes a fixed number of bytes and decodes them as a string
 * in the configured encoding.
 */
class StringType {
    constructor(len, encoding) {
        this.len = len;
        this.encoding = encoding;
    }
    // Decode `len` bytes starting at `offset` from the given byte array.
    get(uint8Array, offset) {
        const buf = node_buffer.Buffer.from(uint8Array);
        return buf.toString(this.encoding, offset, offset + this.len);
    }
}
// Message shared by every EndOfStreamError instance.
const defaultMessages = 'End-Of-Stream';
/**
 * Thrown when a read operation reaches the end of the file or stream.
 */
class EndOfStreamError extends Error {
    constructor() {
        super(defaultMessages);
    }
}
2026-05-06 17:32:44 +08:00
// A promise whose resolve/reject handles are exposed on the instance,
// so it can be settled externally at a later time.
class Deferred {
    constructor() {
        this.resolve = () => null;
        this.reject = () => null;
        this.promise = new Promise((resolve, reject) => {
            this.resolve = resolve;
            this.reject = reject;
        });
    }
}
// Base stream reader: serves previously-peeked data from `peekQueue`
// before pulling new chunks from the underlying stream (readFromStream
// is supplied by the subclass).
class AbstractStreamReader {
    constructor() {
        /**
         * Maximum request length on read-stream operation
         */
        this.maxStreamReadSize = 1 * 1024 * 1024;
        // Set once the underlying stream has ended, errored or closed
        this.endOfStream = false;
        /**
         * Store peeked data
         * @type {Array}
         */
        this.peekQueue = [];
    }
    // Read without consuming: the bytes read are pushed back onto
    // `peekQueue` so the next read() returns them again.
    async peek(uint8Array, offset, length) {
        const bytesRead = await this.read(uint8Array, offset, length);
        this.peekQueue.push(uint8Array.subarray(offset, offset + bytesRead)); // Put read data back to peek buffer
        return bytesRead;
    }
    // Read up to `length` bytes into `buffer` at `offset`: peeked data
    // first, then the stream. Throws EndOfStreamError when nothing at all
    // could be read.
    async read(buffer, offset, length) {
        if (length === 0) {
            return 0;
        }
        let bytesRead = this.readFromPeekBuffer(buffer, offset, length);
        bytesRead += await this.readRemainderFromStream(buffer, offset + bytesRead, length - bytesRead);
        if (bytesRead === 0) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    /**
     * Read chunk from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param offset - Offset target
     * @param length - Number of bytes to read
     * @returns Number of bytes read
     */
    readFromPeekBuffer(buffer, offset, length) {
        let remaining = length;
        let bytesRead = 0;
        // consume peeked data first
        while (this.peekQueue.length > 0 && remaining > 0) {
            const peekData = this.peekQueue.pop(); // Front of queue
            if (!peekData)
                throw new Error('peekData should be defined');
            const lenCopy = Math.min(peekData.length, remaining);
            buffer.set(peekData.subarray(0, lenCopy), offset + bytesRead);
            bytesRead += lenCopy;
            remaining -= lenCopy;
            if (lenCopy < peekData.length) {
                // remainder back to queue
                this.peekQueue.push(peekData.subarray(lenCopy));
            }
        }
        return bytesRead;
    }
    // Keep requesting stream chunks (capped at maxStreamReadSize each)
    // until the remaining amount is read or the stream yields no more data.
    async readRemainderFromStream(buffer, offset, initialRemaining) {
        let remaining = initialRemaining;
        let bytesRead = 0;
        // Continue reading from stream if required
        while (remaining > 0 && !this.endOfStream) {
            const reqLen = Math.min(remaining, this.maxStreamReadSize);
            const chunkLen = await this.readFromStream(buffer, offset + bytesRead, reqLen);
            if (chunkLen === 0)
                break;
            bytesRead += chunkLen;
            remaining -= chunkLen;
        }
        return bytesRead;
    }
}
/**
 * Node.js Readable Stream Reader
 * Ref: https://nodejs.org/api/stream.html#readable-streams
 */
class StreamReader extends AbstractStreamReader {
    constructor(s) {
        super();
        this.s = s;
        /**
         * Deferred used for postponed read request (as not data is yet available to read)
         */
        this.deferred = null;
        if (!s.read || !s.once) {
            throw new Error('Expected an instance of stream.Readable');
        }
        // Any terminal stream event fails the pending deferred read (if any)
        this.s.once('end', () => this.reject(new EndOfStreamError()));
        this.s.once('error', err => this.reject(err));
        this.s.once('close', () => this.reject(new Error('Stream closed')));
    }
    /**
     * Read chunk from stream
     * @param buffer Target Uint8Array (or Buffer) to store data read from stream in
     * @param offset Offset target
     * @param length Number of bytes to read
     * @returns Number of bytes read
     */
    async readFromStream(buffer, offset, length) {
        if (this.endOfStream) {
            return 0;
        }
        const readBuffer = this.s.read(length);
        if (readBuffer) {
            buffer.set(readBuffer, offset);
            return readBuffer.length;
        }
        // No data buffered yet: queue a deferred request served on 'readable'
        const request = {
            buffer,
            offset,
            length,
            deferred: new Deferred()
        };
        this.deferred = request.deferred;
        this.s.once('readable', () => {
            this.readDeferred(request);
        });
        return request.deferred.promise;
    }
    /**
     * Process deferred read request
     * @param request Deferred read request
     */
    readDeferred(request) {
        const readBuffer = this.s.read(request.length);
        if (readBuffer) {
            request.buffer.set(readBuffer, request.offset);
            request.deferred.resolve(readBuffer.length);
            this.deferred = null;
        }
        else {
            // Still not enough data buffered: wait for the next 'readable'
            this.s.once('readable', () => {
                this.readDeferred(request);
            });
        }
    }
    // Mark end-of-stream and fail any outstanding deferred read.
    reject(err) {
        this.endOfStream = true;
        if (this.deferred) {
            this.deferred.reject(err);
            this.deferred = null;
        }
    }
    async abort() {
        this.s.destroy();
    }
}
2025-08-02 12:09:34 +08:00
/**
 * Core tokenizer: maintains the stream position and implements the shared
 * token/number read & peek primitives on top of the subclass-provided
 * readBuffer/peekBuffer. Fix: stray git-timestamp lines were embedded
 * inside readToken() and peekToken(), corrupting the syntax.
 */
class AbstractTokenizer {
    constructor(fileInfo) {
        /**
         * Tokenizer-stream position
         */
        this.position = 0;
        // Scratch buffer for numeric tokens (the largest numeric token is 8 bytes)
        this.numBuffer = new Uint8Array(8);
        this.fileInfo = fileInfo ? fileInfo : {};
    }
    /**
     * Read a token from the tokenizer-stream
     * @param token - The token to read
     * @param position - If provided, the desired position in the tokenizer-stream
     * @returns Promise with token data
     */
    async readToken(token, position = this.position) {
        const uint8Array = new Uint8Array(token.len);
        const len = await this.readBuffer(uint8Array, { position });
        if (len < token.len)
            throw new EndOfStreamError();
        return token.get(uint8Array, 0);
    }
    /**
     * Peek a token from the tokenizer-stream.
     * @param token - Token to peek from the tokenizer-stream.
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     * @returns Promise with token data
     */
    async peekToken(token, position = this.position) {
        const uint8Array = new Uint8Array(token.len);
        const len = await this.peekBuffer(uint8Array, { position });
        if (len < token.len)
            throw new EndOfStreamError();
        return token.get(uint8Array, 0);
    }
    /**
     * Read a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    async readNumber(token) {
        const len = await this.readBuffer(this.numBuffer, { length: token.len });
        if (len < token.len)
            throw new EndOfStreamError();
        return token.get(this.numBuffer, 0);
    }
    /**
     * Peek a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    async peekNumber(token) {
        const len = await this.peekBuffer(this.numBuffer, { length: token.len });
        if (len < token.len)
            throw new EndOfStreamError();
        return token.get(this.numBuffer, 0);
    }
    /**
     * Ignore number of bytes, advances the pointer in under tokenizer-stream.
     * @param length - Number of bytes to ignore
     * @return resolves the number of bytes ignored, equals length if this available, otherwise the number of bytes available
     */
    async ignore(length) {
        if (this.fileInfo.size !== undefined) {
            const bytesLeft = this.fileInfo.size - this.position;
            if (length > bytesLeft) {
                // Clamp the skip to the end of the file
                this.position += bytesLeft;
                return bytesLeft;
            }
        }
        this.position += length;
        return length;
    }
    async close() {
        // empty
    }
    // Fill in defaults for and validate a caller-supplied read-options object.
    normalizeOptions(uint8Array, options) {
        if (options && options.position !== undefined && options.position < this.position) {
            throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
        }
        if (options) {
            return {
                mayBeLess: options.mayBeLess === true,
                offset: options.offset ? options.offset : 0,
                length: options.length ? options.length : (uint8Array.length - (options.offset ? options.offset : 0)),
                position: options.position ? options.position : this.position
            };
        }
        return {
            mayBeLess: false,
            offset: 0,
            length: uint8Array.length,
            position: this.position
        };
    }
}
2026-05-06 17:32:44 +08:00
// Upper bound for the scratch buffer allocated by ignore()
const maxBufferSize = 256000;
// Tokenizer over a (non-seekable) stream, via an AbstractStreamReader.
class ReadStreamTokenizer extends AbstractTokenizer {
    constructor(streamReader, fileInfo) {
        super(fileInfo);
        this.streamReader = streamReader;
    }
    /**
     * Get file information, an HTTP-client may implement this doing a HEAD request
     * @return Promise with file information
     */
    async getFileInfo() {
        return this.fileInfo;
    }
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
     * @param options - Read behaviour options
     * @returns Promise with number of bytes read
     */
    async readBuffer(uint8Array, options) {
        const normOptions = this.normalizeOptions(uint8Array, options);
        const skipBytes = normOptions.position - this.position;
        if (skipBytes > 0) {
            // Requested position is ahead: skip forward, then retry the read
            await this.ignore(skipBytes);
            return this.readBuffer(uint8Array, options);
        }
        else if (skipBytes < 0) {
            throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
        }
        if (normOptions.length === 0) {
            return 0;
        }
        const bytesRead = await this.streamReader.read(uint8Array, normOptions.offset, normOptions.length);
        this.position += bytesRead;
        if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise with number of bytes peeked
     */
    async peekBuffer(uint8Array, options) {
        const normOptions = this.normalizeOptions(uint8Array, options);
        let bytesRead = 0;
        if (normOptions.position) {
            const skipBytes = normOptions.position - this.position;
            if (skipBytes > 0) {
                // Peeking ahead of the current position: peek a larger buffer
                // and hand the caller only the tail past the skipped bytes
                const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
                bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
                uint8Array.set(skipBuffer.subarray(skipBytes), normOptions.offset);
                return bytesRead - skipBytes;
            }
            else if (skipBytes < 0) {
                throw new Error('Cannot peek from a negative offset in a stream');
            }
        }
        if (normOptions.length > 0) {
            try {
                bytesRead = await this.streamReader.peek(uint8Array, normOptions.offset, normOptions.length);
            }
            catch (err) {
                // Tolerate a short peek only when the caller allows it
                if (options && options.mayBeLess && err instanceof EndOfStreamError) {
                    return 0;
                }
                throw err;
            }
            if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
                throw new EndOfStreamError();
            }
        }
        return bytesRead;
    }
    // Skip `length` bytes by reading them into a bounded scratch buffer
    // (streams cannot seek).
    async ignore(length) {
        // debug(`ignore ${this.position}...${this.position + length - 1}`);
        const bufSize = Math.min(maxBufferSize, length);
        const buf = new Uint8Array(bufSize);
        let totBytesRead = 0;
        while (totBytesRead < length) {
            const remaining = length - totBytesRead;
            const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
            if (bytesRead < 0) {
                return bytesRead;
            }
            totBytesRead += bytesRead;
        }
        return totBytesRead;
    }
}
2025-08-02 12:09:34 +08:00
// Tokenizer over an in-memory Uint8Array (fully random-access).
class BufferTokenizer extends AbstractTokenizer {
    /**
     * Construct BufferTokenizer
     * @param uint8Array - Uint8Array to tokenize
     * @param fileInfo - Pass additional file information to the tokenizer
     */
    constructor(uint8Array, fileInfo) {
        super(fileInfo);
        this.uint8Array = uint8Array;
        // Default the reported size to the buffer's length
        this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : uint8Array.length;
    }
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Uint8Array to tokenize
     * @param options - Read behaviour options
     * @returns {Promise<number>}
     */
    async readBuffer(uint8Array, options) {
        if (options && options.position) {
            // Only forward seeks are allowed
            if (options.position < this.position) {
                throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
            }
            this.position = options.position;
        }
        const bytesRead = await this.peekBuffer(uint8Array, options);
        this.position += bytesRead;
        return bytesRead;
    }
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param uint8Array
     * @param options - Read behaviour options
     * @returns {Promise<number>}
     */
    async peekBuffer(uint8Array, options) {
        const normOptions = this.normalizeOptions(uint8Array, options);
        // Clamp to the bytes actually remaining in the backing buffer
        const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
        if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
            throw new EndOfStreamError();
        }
        else {
            uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
            return bytes2read;
        }
    }
    async close() {
        // empty
    }
}
2026-05-06 17:32:44 +08:00
/**
 * Construct a ReadStreamTokenizer from a Node.js Readable stream.
 * @param stream - stream.Readable to read from
 * @param fileInfo - Optional file information (size, MIME type, ...) of the
 * corresponding stream; defaults to an empty object.
 * @returns ReadStreamTokenizer
 */
function fromStream(stream, fileInfo) {
    const info = fileInfo ? fileInfo : {};
    const reader = new StreamReader(stream);
    return new ReadStreamTokenizer(reader, info);
}
2025-08-02 12:09:34 +08:00
/**
 * Construct a BufferTokenizer over an in-memory Uint8Array.
 * @param uint8Array - Uint8Array to tokenize
 * @param fileInfo - Pass additional file information to the tokenizer
 * @returns BufferTokenizer
 */
function fromBuffer(uint8Array, fileInfo) {
    return new BufferTokenizer(uint8Array, fileInfo);
}
// Map each code point of `string` to the char code of its first UTF-16 unit.
function stringToBytes(string) {
    return Array.from(string, (character) => character.charCodeAt(0)); // eslint-disable-line unicorn/prefer-code-point
}
/**
Checks whether the TAR checksum is valid.
@param {Buffer} buffer - The TAR header `[offset ... offset + 512]`.
@param {number} offset - TAR header offset.
@returns {boolean} `true` if the TAR checksum is valid, otherwise `false`.
*/
function tarHeaderChecksumMatches(buffer, offset = 0) {
    // Checksum field: 6 octal digits at bytes 148-153, NUL/space padded
    const readSum = Number.parseInt(buffer.toString('utf8', 148, 154).replace(/\0.*$/, '').trim(), 8); // Read sum in header
    if (Number.isNaN(readSum)) {
        return false;
    }
    const sumRange = (from, to) => {
        let s = 0;
        for (let index = from; index < to; index++) {
            s += buffer[index];
        }
        return s;
    };
    // The checksum field itself (bytes 148-155) counts as eight 0x20 bytes
    const sum = 8 * 0x20 + sumRange(offset, offset + 148) + sumRange(offset + 156, offset + 512);
    return readSum === sum;
}
/**
ID3 UINT32 sync-safe tokenizer token.
28 bits (representing up to 256MB) integer, the msb is 0 to avoid "false syncsignals":
each byte contributes its low 7 bits, big-endian.
*/
const uint32SyncSafeToken = {
    len: 4,
    get(buffer, offset) {
        return (buffer[offset + 3] & 0x7F)
            | (buffer[offset + 2] << 7)
            | (buffer[offset + 1] << 14)
            | (buffer[offset] << 21);
    },
};
// File extensions recognized by the bundled file-type detector.
// Fix: four stray git-timestamp lines were embedded inside the array
// literal (around 'macho' and after 'avro'), corrupting the syntax.
const extensions = [
    'jpg',
    'png',
    'apng',
    'gif',
    'webp',
    'flif',
    'xcf',
    'cr2',
    'cr3',
    'orf',
    'arw',
    'dng',
    'nef',
    'rw2',
    'raf',
    'tif',
    'bmp',
    'icns',
    'jxr',
    'psd',
    'indd',
    'zip',
    'tar',
    'rar',
    'gz',
    'bz2',
    '7z',
    'dmg',
    'mp4',
    'mid',
    'mkv',
    'webm',
    'mov',
    'avi',
    'mpg',
    'mp2',
    'mp3',
    'm4a',
    'oga',
    'ogg',
    'ogv',
    'opus',
    'flac',
    'wav',
    'spx',
    'amr',
    'pdf',
    'epub',
    'elf',
    'macho',
    'exe',
    'swf',
    'rtf',
    'wasm',
    'woff',
    'woff2',
    'eot',
    'ttf',
    'otf',
    'ico',
    'flv',
    'ps',
    'xz',
    'sqlite',
    'nes',
    'crx',
    'xpi',
    'cab',
    'deb',
    'ar',
    'rpm',
    'Z',
    'lz',
    'cfb',
    'mxf',
    'mts',
    'blend',
    'bpg',
    'docx',
    'pptx',
    'xlsx',
    '3gp',
    '3g2',
    'j2c',
    'jp2',
    'jpm',
    'jpx',
    'mj2',
    'aif',
    'qcp',
    'odt',
    'ods',
    'odp',
    'xml',
    'mobi',
    'heic',
    'cur',
    'ktx',
    'ape',
    'wv',
    'dcm',
    'ics',
    'glb',
    'pcap',
    'dsf',
    'lnk',
    'alias',
    'voc',
    'ac3',
    'm4v',
    'm4p',
    'm4b',
    'f4v',
    'f4p',
    'f4b',
    'f4a',
    'mie',
    'asf',
    'ogm',
    'ogx',
    'mpc',
    'arrow',
    'shp',
    'aac',
    'mp1',
    'it',
    's3m',
    'xm',
    'ai',
    'skp',
    'avif',
    'eps',
    'lzh',
    'pgp',
    'asar',
    'stl',
    'chm',
    '3mf',
    'zst',
    'jxl',
    'vcf',
    'jls',
    'pst',
    'dwg',
    'parquet',
    'class',
    'arj',
    'cpio',
    'ace',
    'avro',
    'icc',
    'fbx',
];
// All MIME types the detector can report. (Stray VCS-timestamp lines that
// had corrupted this array literal were removed.)
const mimeTypes = [
	'image/jpeg',
	'image/png',
	'image/gif',
	'image/webp',
	'image/flif',
	'image/x-xcf',
	'image/x-canon-cr2',
	'image/x-canon-cr3',
	'image/tiff',
	'image/bmp',
	'image/vnd.ms-photo',
	'image/vnd.adobe.photoshop',
	'application/x-indesign',
	'application/epub+zip',
	'application/x-xpinstall',
	'application/vnd.oasis.opendocument.text',
	'application/vnd.oasis.opendocument.spreadsheet',
	'application/vnd.oasis.opendocument.presentation',
	'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
	'application/vnd.openxmlformats-officedocument.presentationml.presentation',
	'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
	'application/zip',
	'application/x-tar',
	'application/x-rar-compressed',
	'application/gzip',
	'application/x-bzip2',
	'application/x-7z-compressed',
	'application/x-apple-diskimage',
	'application/x-apache-arrow',
	'video/mp4',
	'audio/midi',
	'video/x-matroska',
	'video/webm',
	'video/quicktime',
	'video/vnd.avi',
	'audio/vnd.wave',
	'audio/qcelp',
	'audio/x-ms-asf',
	'video/x-ms-asf',
	'application/vnd.ms-asf',
	'video/mpeg',
	'video/3gpp',
	'audio/mpeg',
	'audio/mp4', // RFC 4337
	'audio/opus',
	'video/ogg',
	'audio/ogg',
	'application/ogg',
	'audio/x-flac',
	'audio/ape',
	'audio/wavpack',
	'audio/amr',
	'application/pdf',
	'application/x-elf',
	'application/x-mach-binary',
	'application/x-msdownload',
	'application/x-shockwave-flash',
	'application/rtf',
	'application/wasm',
	'font/woff',
	'font/woff2',
	'application/vnd.ms-fontobject',
	'font/ttf',
	'font/otf',
	'image/x-icon',
	'video/x-flv',
	'application/postscript',
	'application/eps',
	'application/x-xz',
	'application/x-sqlite3',
	'application/x-nintendo-nes-rom',
	'application/x-google-chrome-extension',
	'application/vnd.ms-cab-compressed',
	'application/x-deb',
	'application/x-unix-archive',
	'application/x-rpm',
	'application/x-compress',
	'application/x-lzip',
	'application/x-cfb',
	'application/x-mie',
	'application/mxf',
	'video/mp2t',
	'application/x-blender',
	'image/bpg',
	'image/j2c',
	'image/jp2',
	'image/jpx',
	'image/jpm',
	'image/mj2',
	'audio/aiff',
	'application/xml',
	'application/x-mobipocket-ebook',
	'image/heif',
	'image/heif-sequence',
	'image/heic',
	'image/heic-sequence',
	'image/icns',
	'image/ktx',
	'application/dicom',
	'audio/x-musepack',
	'text/calendar',
	'text/vcard',
	'model/gltf-binary',
	'application/vnd.tcpdump.pcap',
	'audio/x-dsf', // Non-standard
	'application/x.ms.shortcut', // Invented by us
	'application/x.apple.alias', // Invented by us
	'audio/x-voc',
	'audio/vnd.dolby.dd-raw',
	'audio/x-m4a',
	'image/apng',
	'image/x-olympus-orf',
	'image/x-sony-arw',
	'image/x-adobe-dng',
	'image/x-nikon-nef',
	'image/x-panasonic-rw2',
	'image/x-fujifilm-raf',
	'video/x-m4v',
	'video/3gpp2',
	'application/x-esri-shape',
	'audio/aac',
	'audio/x-it',
	'audio/x-s3m',
	'audio/x-xm',
	'video/MP1S',
	'video/MP2P',
	'application/vnd.sketchup.skp',
	'image/avif',
	'application/x-lzh-compressed',
	'application/pgp-encrypted',
	'application/x-asar',
	'model/stl',
	'application/vnd.ms-htmlhelp',
	'model/3mf',
	'image/jxl',
	'application/zstd',
	'image/jls',
	'application/vnd.ms-outlook',
	'image/vnd.dwg',
	'application/x-parquet',
	'application/java-vm',
	'application/x-arj',
	'application/x-cpio',
	'application/x-ace-compressed',
	'application/avro',
	'application/vnd.iccprofile',
	'application/x.autodesk.fbx', // Invented by us
];
const minimumBytes = 4100 ; // A fair amount of file-types are detectable within this range.
/**
 * Detect the file type of a `Uint8Array`, `Buffer`, or `ArrayBuffer`.
 * Convenience wrapper around `new FileTypeParser().fromBuffer()`.
 * (Stray VCS-timestamp lines that had corrupted this function body were removed.)
 * @param {Uint8Array | ArrayBuffer} input - The (start of the) file contents.
 * @returns {Promise<{ext: string, mime: string} | undefined>} Detected type, or `undefined` when unrecognized.
 */
async function fileTypeFromBuffer(input) {
	return new FileTypeParser().fromBuffer(input);
}
/**
 * Test whether `buffer` contains the byte sequence `headers`.
 * @param {Uint8Array|Buffer} buffer - Bytes to inspect.
 * @param {number[]} headers - Expected byte values, in order.
 * @param {{offset?: number, mask?: number[]}} [options] - `offset` shifts where
 * the comparison starts; `mask` bytes are ANDed with the buffer bytes before comparing.
 * @returns {boolean} `true` when every expected byte matches.
 */
function _check(buffer, headers, options) {
	const {offset, mask} = {offset: 0, ...options};
	return headers.every((expected, index) => {
		const actual = buffer[index + offset];
		// With a mask, compare against the masked buffer byte instead.
		return mask ? expected === (mask[index] & actual) : expected === actual;
	});
}
class FileTypeParser {
constructor ( options ) {
this . detectors = options ? . customDetectors ;
this . fromTokenizer = this . fromTokenizer . bind ( this ) ;
this . fromBuffer = this . fromBuffer . bind ( this ) ;
this . parse = this . parse . bind ( this ) ;
}
async fromTokenizer ( tokenizer ) {
const initialPosition = tokenizer . position ;
for ( const detector of this . detectors || [ ] ) {
const fileType = await detector ( tokenizer ) ;
if ( fileType ) {
return fileType ;
}
if ( initialPosition !== tokenizer . position ) {
return undefined ; // Cannot proceed scanning of the tokenizer is at an arbitrary position
}
2025-08-02 12:09:34 +08:00
}
2026-05-06 17:32:44 +08:00
return this . parse ( tokenizer ) ;
}
async fromBuffer ( input ) {
if ( ! ( input instanceof Uint8Array || input instanceof ArrayBuffer ) ) {
throw new TypeError ( ` Expected the \` input \` argument to be of type \` Uint8Array \` or \` Buffer \` or \` ArrayBuffer \` , got \` ${ typeof input } \` ` ) ;
}
const buffer = input instanceof Uint8Array ? input : new Uint8Array ( input ) ;
if ( ! ( buffer ? . length > 1 ) ) {
return ;
}
return this . fromTokenizer ( fromBuffer ( buffer ) ) ;
}
async fromBlob ( blob ) {
const buffer = await blob . arrayBuffer ( ) ;
return this . fromBuffer ( new Uint8Array ( buffer ) ) ;
}
async fromStream ( stream ) {
const tokenizer = await fromStream ( stream ) ;
try {
return await this . fromTokenizer ( tokenizer ) ;
} finally {
await tokenizer . close ( ) ;
}
}
async toDetectionStream ( readableStream , options = { } ) {
const { default : stream } = await import ( 'node:stream' ) ;
const { sampleSize = minimumBytes } = options ;
return new Promise ( ( resolve , reject ) => {
readableStream . on ( 'error' , reject ) ;
readableStream . once ( 'readable' , ( ) => {
( async ( ) => {
try {
// Set up output stream
const pass = new stream . PassThrough ( ) ;
const outputStream = stream . pipeline ? stream . pipeline ( readableStream , pass , ( ) => { } ) : readableStream . pipe ( pass ) ;
// Read the input stream and detect the filetype
const chunk = readableStream . read ( sampleSize ) ? ? readableStream . read ( ) ? ? node _buffer . Buffer . alloc ( 0 ) ;
try {
pass . fileType = await this . fromBuffer ( chunk ) ;
} catch ( error ) {
if ( error instanceof EndOfStreamError ) {
pass . fileType = undefined ;
} else {
reject ( error ) ;
}
}
resolve ( outputStream ) ;
} catch ( error ) {
reject ( error ) ;
}
} ) ( ) ;
} ) ;
} ) ;
2025-08-02 12:09:34 +08:00
}
check ( header , options ) {
return _check ( this . buffer , header , options ) ;
}
checkString ( header , options ) {
return this . check ( stringToBytes ( header ) , options ) ;
}
async parse ( tokenizer ) {
this . buffer = node _buffer . Buffer . alloc ( minimumBytes ) ;
// Keep reading until EOF if the file size is unknown.
if ( tokenizer . fileInfo . size === undefined ) {
tokenizer . fileInfo . size = Number . MAX _SAFE _INTEGER ;
}
this . tokenizer = tokenizer ;
await tokenizer . peekBuffer ( this . buffer , { length : 12 , mayBeLess : true } ) ;
// -- 2-byte signatures --
if ( this . check ( [ 0x42 , 0x4D ] ) ) {
return {
ext : 'bmp' ,
mime : 'image/bmp' ,
} ;
}
if ( this . check ( [ 0x0B , 0x77 ] ) ) {
return {
ext : 'ac3' ,
mime : 'audio/vnd.dolby.dd-raw' ,
} ;
}
if ( this . check ( [ 0x78 , 0x01 ] ) ) {
return {
ext : 'dmg' ,
mime : 'application/x-apple-diskimage' ,
} ;
}
if ( this . check ( [ 0x4D , 0x5A ] ) ) {
return {
ext : 'exe' ,
mime : 'application/x-msdownload' ,
} ;
}
if ( this . check ( [ 0x25 , 0x21 ] ) ) {
await tokenizer . peekBuffer ( this . buffer , { length : 24 , mayBeLess : true } ) ;
if (
this . checkString ( 'PS-Adobe-' , { offset : 2 } )
&& this . checkString ( ' EPSF-' , { offset : 14 } )
) {
return {
ext : 'eps' ,
mime : 'application/eps' ,
} ;
}
return {
ext : 'ps' ,
mime : 'application/postscript' ,
} ;
}
if (
this . check ( [ 0x1F , 0xA0 ] )
|| this . check ( [ 0x1F , 0x9D ] )
) {
return {
ext : 'Z' ,
mime : 'application/x-compress' ,
} ;
}
if ( this . check ( [ 0xC7 , 0x71 ] ) ) {
return {
ext : 'cpio' ,
mime : 'application/x-cpio' ,
} ;
}
if ( this . check ( [ 0x60 , 0xEA ] ) ) {
return {
ext : 'arj' ,
mime : 'application/x-arj' ,
} ;
}
// -- 3-byte signatures --
if ( this . check ( [ 0xEF , 0xBB , 0xBF ] ) ) { // UTF-8-BOM
// Strip off UTF-8-BOM
this . tokenizer . ignore ( 3 ) ;
return this . parse ( tokenizer ) ;
}
if ( this . check ( [ 0x47 , 0x49 , 0x46 ] ) ) {
return {
ext : 'gif' ,
mime : 'image/gif' ,
} ;
}
if ( this . check ( [ 0x49 , 0x49 , 0xBC ] ) ) {
return {
ext : 'jxr' ,
mime : 'image/vnd.ms-photo' ,
} ;
}
if ( this . check ( [ 0x1F , 0x8B , 0x8 ] ) ) {
return {
ext : 'gz' ,
mime : 'application/gzip' ,
} ;
}
if ( this . check ( [ 0x42 , 0x5A , 0x68 ] ) ) {
return {
ext : 'bz2' ,
mime : 'application/x-bzip2' ,
} ;
}
if ( this . checkString ( 'ID3' ) ) {
await tokenizer . ignore ( 6 ) ; // Skip ID3 header until the header size
const id3HeaderLength = await tokenizer . readToken ( uint32SyncSafeToken ) ;
if ( tokenizer . position + id3HeaderLength > tokenizer . fileInfo . size ) {
// Guess file type based on ID3 header for backward compatibility
return {
ext : 'mp3' ,
mime : 'audio/mpeg' ,
} ;
}
await tokenizer . ignore ( id3HeaderLength ) ;
return this . fromTokenizer ( tokenizer ) ; // Skip ID3 header, recursion
}
// Musepack, SV7
if ( this . checkString ( 'MP+' ) ) {
return {
ext : 'mpc' ,
mime : 'audio/x-musepack' ,
} ;
}
if (
( this . buffer [ 0 ] === 0x43 || this . buffer [ 0 ] === 0x46 )
&& this . check ( [ 0x57 , 0x53 ] , { offset : 1 } )
) {
return {
ext : 'swf' ,
mime : 'application/x-shockwave-flash' ,
} ;
}
// -- 4-byte signatures --
// Requires a sample size of 4 bytes
if ( this . check ( [ 0xFF , 0xD8 , 0xFF ] ) ) {
if ( this . check ( [ 0xF7 ] , { offset : 3 } ) ) { // JPG7/SOF55, indicating a ISO/IEC 14495 / JPEG-LS file
return {
ext : 'jls' ,
mime : 'image/jls' ,
} ;
}
return {
ext : 'jpg' ,
mime : 'image/jpeg' ,
} ;
}
if ( this . check ( [ 0x4F , 0x62 , 0x6A , 0x01 ] ) ) {
return {
ext : 'avro' ,
mime : 'application/avro' ,
} ;
}
if ( this . checkString ( 'FLIF' ) ) {
return {
ext : 'flif' ,
mime : 'image/flif' ,
} ;
}
if ( this . checkString ( '8BPS' ) ) {
return {
ext : 'psd' ,
mime : 'image/vnd.adobe.photoshop' ,
} ;
}
if ( this . checkString ( 'WEBP' , { offset : 8 } ) ) {
return {
ext : 'webp' ,
mime : 'image/webp' ,
} ;
}
// Musepack, SV8
if ( this . checkString ( 'MPCK' ) ) {
return {
ext : 'mpc' ,
mime : 'audio/x-musepack' ,
} ;
}
if ( this . checkString ( 'FORM' ) ) {
return {
ext : 'aif' ,
mime : 'audio/aiff' ,
} ;
}
if ( this . checkString ( 'icns' , { offset : 0 } ) ) {
return {
ext : 'icns' ,
mime : 'image/icns' ,
} ;
}
// Zip-based file formats
// Need to be before the `zip` check
if ( this . check ( [ 0x50 , 0x4B , 0x3 , 0x4 ] ) ) { // Local file header signature
try {
while ( tokenizer . position + 30 < tokenizer . fileInfo . size ) {
await tokenizer . readBuffer ( this . buffer , { length : 30 } ) ;
// https://en.wikipedia.org/wiki/Zip_(file_format)#File_headers
const zipHeader = {
compressedSize : this . buffer . readUInt32LE ( 18 ) ,
uncompressedSize : this . buffer . readUInt32LE ( 22 ) ,
filenameLength : this . buffer . readUInt16LE ( 26 ) ,
extraFieldLength : this . buffer . readUInt16LE ( 28 ) ,
} ;
zipHeader . filename = await tokenizer . readToken ( new StringType ( zipHeader . filenameLength , 'utf-8' ) ) ;
await tokenizer . ignore ( zipHeader . extraFieldLength ) ;
// Assumes signed `.xpi` from addons.mozilla.org
if ( zipHeader . filename === 'META-INF/mozilla.rsa' ) {
return {
ext : 'xpi' ,
mime : 'application/x-xpinstall' ,
} ;
}
if ( zipHeader . filename . endsWith ( '.rels' ) || zipHeader . filename . endsWith ( '.xml' ) ) {
const type = zipHeader . filename . split ( '/' ) [ 0 ] ;
switch ( type ) {
case '_rels' :
break ;
case 'word' :
return {
ext : 'docx' ,
mime : 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' ,
} ;
case 'ppt' :
return {
ext : 'pptx' ,
mime : 'application/vnd.openxmlformats-officedocument.presentationml.presentation' ,
} ;
case 'xl' :
return {
ext : 'xlsx' ,
mime : 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' ,
} ;
default :
break ;
}
}
if ( zipHeader . filename . startsWith ( 'xl/' ) ) {
return {
ext : 'xlsx' ,
mime : 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' ,
} ;
}
if ( zipHeader . filename . startsWith ( '3D/' ) && zipHeader . filename . endsWith ( '.model' ) ) {
return {
ext : '3mf' ,
mime : 'model/3mf' ,
} ;
}
// The docx, xlsx and pptx file types extend the Office Open XML file format:
// https://en.wikipedia.org/wiki/Office_Open_XML_file_formats
// We look for:
// - one entry named '[Content_Types].xml' or '_rels/.rels',
// - one entry indicating specific type of file.
// MS Office, OpenOffice and LibreOffice may put the parts in different order, so the check should not rely on it.
if ( zipHeader . filename === 'mimetype' && zipHeader . compressedSize === zipHeader . uncompressedSize ) {
let mimeType = await tokenizer . readToken ( new StringType ( zipHeader . compressedSize , 'utf-8' ) ) ;
mimeType = mimeType . trim ( ) ;
switch ( mimeType ) {
case 'application/epub+zip' :
return {
ext : 'epub' ,
mime : 'application/epub+zip' ,
} ;
case 'application/vnd.oasis.opendocument.text' :
return {
ext : 'odt' ,
mime : 'application/vnd.oasis.opendocument.text' ,
} ;
case 'application/vnd.oasis.opendocument.spreadsheet' :
return {
ext : 'ods' ,
mime : 'application/vnd.oasis.opendocument.spreadsheet' ,
} ;
case 'application/vnd.oasis.opendocument.presentation' :
return {
ext : 'odp' ,
mime : 'application/vnd.oasis.opendocument.presentation' ,
} ;
default :
}
}
// Try to find next header manually when current one is corrupted
if ( zipHeader . compressedSize === 0 ) {
let nextHeaderIndex = - 1 ;
while ( nextHeaderIndex < 0 && ( tokenizer . position < tokenizer . fileInfo . size ) ) {
await tokenizer . peekBuffer ( this . buffer , { mayBeLess : true } ) ;
nextHeaderIndex = this . buffer . indexOf ( '504B0304' , 0 , 'hex' ) ;
// Move position to the next header if found, skip the whole buffer otherwise
await tokenizer . ignore ( nextHeaderIndex >= 0 ? nextHeaderIndex : this . buffer . length ) ;
}
} else {
await tokenizer . ignore ( zipHeader . compressedSize ) ;
}
}
} catch ( error ) {
if ( ! ( error instanceof EndOfStreamError ) ) {
throw error ;
}
}
return {
ext : 'zip' ,
mime : 'application/zip' ,
} ;
}
if ( this . checkString ( 'OggS' ) ) {
// This is an OGG container
await tokenizer . ignore ( 28 ) ;
const type = node _buffer . Buffer . alloc ( 8 ) ;
await tokenizer . readBuffer ( type ) ;
// Needs to be before `ogg` check
if ( _check ( type , [ 0x4F , 0x70 , 0x75 , 0x73 , 0x48 , 0x65 , 0x61 , 0x64 ] ) ) {
return {
ext : 'opus' ,
mime : 'audio/opus' ,
} ;
}
// If ' theora' in header.
if ( _check ( type , [ 0x80 , 0x74 , 0x68 , 0x65 , 0x6F , 0x72 , 0x61 ] ) ) {
return {
ext : 'ogv' ,
mime : 'video/ogg' ,
} ;
}
// If '\x01video' in header.
if ( _check ( type , [ 0x01 , 0x76 , 0x69 , 0x64 , 0x65 , 0x6F , 0x00 ] ) ) {
return {
ext : 'ogm' ,
mime : 'video/ogg' ,
} ;
}
// If ' FLAC' in header https://xiph.org/flac/faq.html
if ( _check ( type , [ 0x7F , 0x46 , 0x4C , 0x41 , 0x43 ] ) ) {
return {
ext : 'oga' ,
mime : 'audio/ogg' ,
} ;
}
// 'Speex ' in header https://en.wikipedia.org/wiki/Speex
if ( _check ( type , [ 0x53 , 0x70 , 0x65 , 0x65 , 0x78 , 0x20 , 0x20 ] ) ) {
return {
ext : 'spx' ,
mime : 'audio/ogg' ,
} ;
}
// If '\x01vorbis' in header
if ( _check ( type , [ 0x01 , 0x76 , 0x6F , 0x72 , 0x62 , 0x69 , 0x73 ] ) ) {
return {
ext : 'ogg' ,
mime : 'audio/ogg' ,
} ;
}
// Default OGG container https://www.iana.org/assignments/media-types/application/ogg
return {
ext : 'ogx' ,
mime : 'application/ogg' ,
} ;
}
if (
this . check ( [ 0x50 , 0x4B ] )
&& ( this . buffer [ 2 ] === 0x3 || this . buffer [ 2 ] === 0x5 || this . buffer [ 2 ] === 0x7 )
&& ( this . buffer [ 3 ] === 0x4 || this . buffer [ 3 ] === 0x6 || this . buffer [ 3 ] === 0x8 )
) {
return {
ext : 'zip' ,
mime : 'application/zip' ,
} ;
}
//
// File Type Box (https://en.wikipedia.org/wiki/ISO_base_media_file_format)
// It's not required to be first, but it's recommended to be. Almost all ISO base media files start with `ftyp` box.
// `ftyp` box must contain a brand major identifier, which must consist of ISO 8859-1 printable characters.
// Here we check for 8859-1 printable characters (for simplicity, it's a mask which also catches one non-printable character).
if (
this . checkString ( 'ftyp' , { offset : 4 } )
&& ( this . buffer [ 8 ] & 0x60 ) !== 0x00 // Brand major, first character ASCII?
) {
// They all can have MIME `video/mp4` except `application/mp4` special-case which is hard to detect.
// For some cases, we're specific, everything else falls to `video/mp4` with `mp4` extension.
const brandMajor = this . buffer . toString ( 'binary' , 8 , 12 ) . replace ( '\0' , ' ' ) . trim ( ) ;
switch ( brandMajor ) {
case 'avif' :
case 'avis' :
return { ext : 'avif' , mime : 'image/avif' } ;
case 'mif1' :
return { ext : 'heic' , mime : 'image/heif' } ;
case 'msf1' :
return { ext : 'heic' , mime : 'image/heif-sequence' } ;
case 'heic' :
case 'heix' :
return { ext : 'heic' , mime : 'image/heic' } ;
case 'hevc' :
case 'hevx' :
return { ext : 'heic' , mime : 'image/heic-sequence' } ;
case 'qt' :
return { ext : 'mov' , mime : 'video/quicktime' } ;
case 'M4V' :
case 'M4VH' :
case 'M4VP' :
return { ext : 'm4v' , mime : 'video/x-m4v' } ;
case 'M4P' :
return { ext : 'm4p' , mime : 'video/mp4' } ;
case 'M4B' :
return { ext : 'm4b' , mime : 'audio/mp4' } ;
case 'M4A' :
return { ext : 'm4a' , mime : 'audio/x-m4a' } ;
case 'F4V' :
return { ext : 'f4v' , mime : 'video/mp4' } ;
case 'F4P' :
return { ext : 'f4p' , mime : 'video/mp4' } ;
case 'F4A' :
return { ext : 'f4a' , mime : 'audio/mp4' } ;
case 'F4B' :
return { ext : 'f4b' , mime : 'audio/mp4' } ;
case 'crx' :
return { ext : 'cr3' , mime : 'image/x-canon-cr3' } ;
default :
if ( brandMajor . startsWith ( '3g' ) ) {
if ( brandMajor . startsWith ( '3g2' ) ) {
return { ext : '3g2' , mime : 'video/3gpp2' } ;
}
return { ext : '3gp' , mime : 'video/3gpp' } ;
}
return { ext : 'mp4' , mime : 'video/mp4' } ;
}
}
if ( this . checkString ( 'MThd' ) ) {
return {
ext : 'mid' ,
mime : 'audio/midi' ,
} ;
}
if (
this . checkString ( 'wOFF' )
&& (
this . check ( [ 0x00 , 0x01 , 0x00 , 0x00 ] , { offset : 4 } )
|| this . checkString ( 'OTTO' , { offset : 4 } )
)
) {
return {
ext : 'woff' ,
mime : 'font/woff' ,
} ;
}
if (
this . checkString ( 'wOF2' )
&& (
this . check ( [ 0x00 , 0x01 , 0x00 , 0x00 ] , { offset : 4 } )
|| this . checkString ( 'OTTO' , { offset : 4 } )
)
) {
return {
ext : 'woff2' ,
mime : 'font/woff2' ,
} ;
}
if ( this . check ( [ 0xD4 , 0xC3 , 0xB2 , 0xA1 ] ) || this . check ( [ 0xA1 , 0xB2 , 0xC3 , 0xD4 ] ) ) {
return {
ext : 'pcap' ,
mime : 'application/vnd.tcpdump.pcap' ,
} ;
}
// Sony DSD Stream File (DSF)
if ( this . checkString ( 'DSD ' ) ) {
return {
ext : 'dsf' ,
mime : 'audio/x-dsf' , // Non-standard
} ;
}
if ( this . checkString ( 'LZIP' ) ) {
return {
ext : 'lz' ,
mime : 'application/x-lzip' ,
} ;
}
if ( this . checkString ( 'fLaC' ) ) {
return {
ext : 'flac' ,
mime : 'audio/x-flac' ,
} ;
}
if ( this . check ( [ 0x42 , 0x50 , 0x47 , 0xFB ] ) ) {
return {
ext : 'bpg' ,
mime : 'image/bpg' ,
} ;
}
if ( this . checkString ( 'wvpk' ) ) {
return {
ext : 'wv' ,
mime : 'audio/wavpack' ,
} ;
}
if ( this . checkString ( '%PDF' ) ) {
try {
await tokenizer . ignore ( 1350 ) ;
const maxBufferSize = 10 * 1024 * 1024 ;
const buffer = node _buffer . Buffer . alloc ( Math . min ( maxBufferSize , tokenizer . fileInfo . size ) ) ;
await tokenizer . readBuffer ( buffer , { mayBeLess : true } ) ;
// Check if this is an Adobe Illustrator file
if ( buffer . includes ( node _buffer . Buffer . from ( 'AIPrivateData' ) ) ) {
return {
ext : 'ai' ,
mime : 'application/postscript' ,
} ;
}
} catch ( error ) {
// Swallow end of stream error if file is too small for the Adobe AI check
if ( ! ( error instanceof EndOfStreamError ) ) {
throw error ;
}
}
// Assume this is just a normal PDF
return {
ext : 'pdf' ,
mime : 'application/pdf' ,
} ;
}
if ( this . check ( [ 0x00 , 0x61 , 0x73 , 0x6D ] ) ) {
return {
ext : 'wasm' ,
mime : 'application/wasm' ,
} ;
}
// TIFF, little-endian type
if ( this . check ( [ 0x49 , 0x49 ] ) ) {
const fileType = await this . readTiffHeader ( false ) ;
if ( fileType ) {
return fileType ;
}
}
// TIFF, big-endian type
if ( this . check ( [ 0x4D , 0x4D ] ) ) {
const fileType = await this . readTiffHeader ( true ) ;
if ( fileType ) {
return fileType ;
}
}
if ( this . checkString ( 'MAC ' ) ) {
return {
ext : 'ape' ,
mime : 'audio/ape' ,
} ;
}
// https://github.com/file/file/blob/master/magic/Magdir/matroska
if ( this . check ( [ 0x1A , 0x45 , 0xDF , 0xA3 ] ) ) { // Root element: EBML
async function readField ( ) {
const msb = await tokenizer . peekNumber ( UINT8 ) ;
let mask = 0x80 ;
let ic = 0 ; // 0 = A, 1 = B, 2 = C, 3
// = D
while ( ( msb & mask ) === 0 && mask !== 0 ) {
++ ic ;
mask >>= 1 ;
}
const id = node _buffer . Buffer . alloc ( ic + 1 ) ;
await tokenizer . readBuffer ( id ) ;
return id ;
}
async function readElement ( ) {
const id = await readField ( ) ;
const lengthField = await readField ( ) ;
lengthField [ 0 ] ^= 0x80 >> ( lengthField . length - 1 ) ;
const nrLength = Math . min ( 6 , lengthField . length ) ; // JavaScript can max read 6 bytes integer
return {
id : id . readUIntBE ( 0 , id . length ) ,
len : lengthField . readUIntBE ( lengthField . length - nrLength , nrLength ) ,
} ;
}
async function readChildren ( children ) {
while ( children > 0 ) {
const element = await readElement ( ) ;
if ( element . id === 0x42 _82 ) {
const rawValue = await tokenizer . readToken ( new StringType ( element . len , 'utf-8' ) ) ;
return rawValue . replace ( /\00.*$/g , '' ) ; // Return DocType
}
await tokenizer . ignore ( element . len ) ; // ignore payload
-- children ;
}
}
const re = await readElement ( ) ;
const docType = await readChildren ( re . len ) ;
switch ( docType ) {
case 'webm' :
return {
ext : 'webm' ,
mime : 'video/webm' ,
} ;
case 'matroska' :
return {
ext : 'mkv' ,
mime : 'video/x-matroska' ,
} ;
default :
return ;
}
}
// RIFF file format which might be AVI, WAV, QCP, etc
if ( this . check ( [ 0x52 , 0x49 , 0x46 , 0x46 ] ) ) {
if ( this . check ( [ 0x41 , 0x56 , 0x49 ] , { offset : 8 } ) ) {
return {
ext : 'avi' ,
mime : 'video/vnd.avi' ,
} ;
}
if ( this . check ( [ 0x57 , 0x41 , 0x56 , 0x45 ] , { offset : 8 } ) ) {
return {
ext : 'wav' ,
mime : 'audio/vnd.wave' ,
} ;
}
// QLCM, QCP file
if ( this . check ( [ 0x51 , 0x4C , 0x43 , 0x4D ] , { offset : 8 } ) ) {
return {
ext : 'qcp' ,
mime : 'audio/qcelp' ,
} ;
}
}
if ( this . checkString ( 'SQLi' ) ) {
return {
ext : 'sqlite' ,
mime : 'application/x-sqlite3' ,
} ;
}
if ( this . check ( [ 0x4E , 0x45 , 0x53 , 0x1A ] ) ) {
return {
ext : 'nes' ,
mime : 'application/x-nintendo-nes-rom' ,
} ;
}
if ( this . checkString ( 'Cr24' ) ) {
return {
ext : 'crx' ,
mime : 'application/x-google-chrome-extension' ,
} ;
}
if (
this . checkString ( 'MSCF' )
|| this . checkString ( 'ISc(' )
) {
return {
ext : 'cab' ,
mime : 'application/vnd.ms-cab-compressed' ,
} ;
}
if ( this . check ( [ 0xED , 0xAB , 0xEE , 0xDB ] ) ) {
return {
ext : 'rpm' ,
mime : 'application/x-rpm' ,
} ;
}
if ( this . check ( [ 0xC5 , 0xD0 , 0xD3 , 0xC6 ] ) ) {
return {
ext : 'eps' ,
mime : 'application/eps' ,
} ;
}
if ( this . check ( [ 0x28 , 0xB5 , 0x2F , 0xFD ] ) ) {
return {
ext : 'zst' ,
mime : 'application/zstd' ,
} ;
}
if ( this . check ( [ 0x7F , 0x45 , 0x4C , 0x46 ] ) ) {
return {
ext : 'elf' ,
mime : 'application/x-elf' ,
} ;
}
if ( this . check ( [ 0x21 , 0x42 , 0x44 , 0x4E ] ) ) {
return {
ext : 'pst' ,
mime : 'application/vnd.ms-outlook' ,
} ;
}
if ( this . checkString ( 'PAR1' ) ) {
return {
ext : 'parquet' ,
mime : 'application/x-parquet' ,
} ;
}
if ( this . check ( [ 0xCF , 0xFA , 0xED , 0xFE ] ) ) {
return {
ext : 'macho' ,
mime : 'application/x-mach-binary' ,
} ;
}
// -- 5-byte signatures --
if ( this . check ( [ 0x4F , 0x54 , 0x54 , 0x4F , 0x00 ] ) ) {
return {
ext : 'otf' ,
mime : 'font/otf' ,
} ;
}
if ( this . checkString ( '#!AMR' ) ) {
return {
ext : 'amr' ,
mime : 'audio/amr' ,
} ;
}
if ( this . checkString ( '{\\rtf' ) ) {
return {
ext : 'rtf' ,
mime : 'application/rtf' ,
} ;
}
if ( this . check ( [ 0x46 , 0x4C , 0x56 , 0x01 ] ) ) {
return {
ext : 'flv' ,
mime : 'video/x-flv' ,
} ;
}
if ( this . checkString ( 'IMPM' ) ) {
return {
ext : 'it' ,
mime : 'audio/x-it' ,
} ;
}
if (
this . checkString ( '-lh0-' , { offset : 2 } )
|| this . checkString ( '-lh1-' , { offset : 2 } )
|| this . checkString ( '-lh2-' , { offset : 2 } )
|| this . checkString ( '-lh3-' , { offset : 2 } )
|| this . checkString ( '-lh4-' , { offset : 2 } )
|| this . checkString ( '-lh5-' , { offset : 2 } )
|| this . checkString ( '-lh6-' , { offset : 2 } )
|| this . checkString ( '-lh7-' , { offset : 2 } )
|| this . checkString ( '-lzs-' , { offset : 2 } )
|| this . checkString ( '-lz4-' , { offset : 2 } )
|| this . checkString ( '-lz5-' , { offset : 2 } )
|| this . checkString ( '-lhd-' , { offset : 2 } )
) {
return {
ext : 'lzh' ,
mime : 'application/x-lzh-compressed' ,
} ;
}
// MPEG program stream (PS or MPEG-PS)
if ( this . check ( [ 0x00 , 0x00 , 0x01 , 0xBA ] ) ) {
// MPEG-PS, MPEG-1 Part 1
if ( this . check ( [ 0x21 ] , { offset : 4 , mask : [ 0xF1 ] } ) ) {
return {
ext : 'mpg' , // May also be .ps, .mpeg
mime : 'video/MP1S' ,
} ;
}
// MPEG-PS, MPEG-2 Part 1
if ( this . check ( [ 0x44 ] , { offset : 4 , mask : [ 0xC4 ] } ) ) {
return {
ext : 'mpg' , // May also be .mpg, .m2p, .vob or .sub
mime : 'video/MP2P' ,
} ;
}
}
if ( this . checkString ( 'ITSF' ) ) {
return {
ext : 'chm' ,
mime : 'application/vnd.ms-htmlhelp' ,
} ;
}
if ( this . check ( [ 0xCA , 0xFE , 0xBA , 0xBE ] ) ) {
return {
ext : 'class' ,
mime : 'application/java-vm' ,
} ;
}
// -- 6-byte signatures --
if ( this . check ( [ 0xFD , 0x37 , 0x7A , 0x58 , 0x5A , 0x00 ] ) ) {
return {
ext : 'xz' ,
mime : 'application/x-xz' ,
} ;
}
if ( this . checkString ( '<?xml ' ) ) {
return {
ext : 'xml' ,
mime : 'application/xml' ,
} ;
}
if ( this . check ( [ 0x37 , 0x7A , 0xBC , 0xAF , 0x27 , 0x1C ] ) ) {
return {
ext : '7z' ,
mime : 'application/x-7z-compressed' ,
} ;
}
if (
this . check ( [ 0x52 , 0x61 , 0x72 , 0x21 , 0x1A , 0x7 ] )
&& ( this . buffer [ 6 ] === 0x0 || this . buffer [ 6 ] === 0x1 )
) {
return {
ext : 'rar' ,
mime : 'application/x-rar-compressed' ,
} ;
}
if ( this . checkString ( 'solid ' ) ) {
return {
ext : 'stl' ,
mime : 'model/stl' ,
} ;
}
if ( this . checkString ( 'AC' ) ) {
const version = this . buffer . toString ( 'binary' , 2 , 6 ) ;
if ( version . match ( '^d*' ) && version >= 1000 && version <= 1050 ) {
return {
ext : 'dwg' ,
mime : 'image/vnd.dwg' ,
} ;
}
}
if ( this . checkString ( '070707' ) ) {
return {
ext : 'cpio' ,
mime : 'application/x-cpio' ,
} ;
}
// -- 7-byte signatures --
if ( this . checkString ( 'BLENDER' ) ) {
return {
ext : 'blend' ,
mime : 'application/x-blender' ,
} ;
}
if ( this . checkString ( '!<arch>' ) ) {
await tokenizer . ignore ( 8 ) ;
const string = await tokenizer . readToken ( new StringType ( 13 , 'ascii' ) ) ;
if ( string === 'debian-binary' ) {
return {
ext : 'deb' ,
mime : 'application/x-deb' ,
} ;
}
return {
ext : 'ar' ,
mime : 'application/x-unix-archive' ,
} ;
}
if ( this . checkString ( '**ACE' , { offset : 7 } ) ) {
await tokenizer . peekBuffer ( this . buffer , { length : 14 , mayBeLess : true } ) ;
if ( this . checkString ( '**' , { offset : 12 } ) ) {
return {
ext : 'ace' ,
mime : 'application/x-ace-compressed' ,
} ;
}
}
// -- 8-byte signatures --
if ( this . check ( [ 0x89 , 0x50 , 0x4E , 0x47 , 0x0D , 0x0A , 0x1A , 0x0A ] ) ) {
// APNG format (https://wiki.mozilla.org/APNG_Specification)
// 1. Find the first IDAT (image data) chunk (49 44 41 54)
// 2. Check if there is an "acTL" chunk before the IDAT one (61 63 54 4C)
// Offset calculated as follows:
// - 8 bytes: PNG signature
// - 4 (length) + 4 (chunk type) + 13 (chunk data) + 4 (CRC): IHDR chunk
await tokenizer . ignore ( 8 ) ; // ignore PNG signature
async function readChunkHeader ( ) {
return {
length : await tokenizer . readToken ( INT32 _BE ) ,
type : await tokenizer . readToken ( new StringType ( 4 , 'binary' ) ) ,
} ;
}
do {
const chunk = await readChunkHeader ( ) ;
if ( chunk . length < 0 ) {
return ; // Invalid chunk length
}
switch ( chunk . type ) {
case 'IDAT' :
return {
ext : 'png' ,
mime : 'image/png' ,
} ;
case 'acTL' :
return {
ext : 'apng' ,
mime : 'image/apng' ,
} ;
default :
await tokenizer . ignore ( chunk . length + 4 ) ; // Ignore chunk-data + CRC
}
} while ( tokenizer . position + 8 < tokenizer . fileInfo . size ) ;
return {
ext : 'png' ,
mime : 'image/png' ,
} ;
}
if ( this . check ( [ 0x41 , 0x52 , 0x52 , 0x4F , 0x57 , 0x31 , 0x00 , 0x00 ] ) ) {
return {
ext : 'arrow' ,
mime : 'application/x-apache-arrow' ,
} ;
}
if ( this . check ( [ 0x67 , 0x6C , 0x54 , 0x46 , 0x02 , 0x00 , 0x00 , 0x00 ] ) ) {
return {
ext : 'glb' ,
mime : 'model/gltf-binary' ,
} ;
}
// `mov` format variants
if (
this . check ( [ 0x66 , 0x72 , 0x65 , 0x65 ] , { offset : 4 } ) // `free`
|| this . check ( [ 0x6D , 0x64 , 0x61 , 0x74 ] , { offset : 4 } ) // `mdat` MJPEG
|| this . check ( [ 0x6D , 0x6F , 0x6F , 0x76 ] , { offset : 4 } ) // `moov`
|| this . check ( [ 0x77 , 0x69 , 0x64 , 0x65 ] , { offset : 4 } ) // `wide`
) {
return {
ext : 'mov' ,
mime : 'video/quicktime' ,
} ;
}
// -- 9-byte signatures --
if ( this . check ( [ 0x49 , 0x49 , 0x52 , 0x4F , 0x08 , 0x00 , 0x00 , 0x00 , 0x18 ] ) ) {
return {
ext : 'orf' ,
mime : 'image/x-olympus-orf' ,
} ;
}
if ( this . checkString ( 'gimp xcf ' ) ) {
return {
ext : 'xcf' ,
mime : 'image/x-xcf' ,
} ;
}
// -- 12-byte signatures --
if ( this . check ( [ 0x49 , 0x49 , 0x55 , 0x00 , 0x18 , 0x00 , 0x00 , 0x00 , 0x88 , 0xE7 , 0x74 , 0xD8 ] ) ) {
return {
ext : 'rw2' ,
mime : 'image/x-panasonic-rw2' ,
} ;
}
// ASF_Header_Object first 80 bytes
if ( this . check ( [ 0x30 , 0x26 , 0xB2 , 0x75 , 0x8E , 0x66 , 0xCF , 0x11 , 0xA6 , 0xD9 ] ) ) {
async function readHeader ( ) {
const guid = node _buffer . Buffer . alloc ( 16 ) ;
await tokenizer . readBuffer ( guid ) ;
return {
id : guid ,
size : Number ( await tokenizer . readToken ( UINT64 _LE ) ) ,
} ;
}
await tokenizer . ignore ( 30 ) ;
// Search for header should be in first 1KB of file.
while ( tokenizer . position + 24 < tokenizer . fileInfo . size ) {
const header = await readHeader ( ) ;
let payload = header . size - 24 ;
if ( _check ( header . id , [ 0x91 , 0x07 , 0xDC , 0xB7 , 0xB7 , 0xA9 , 0xCF , 0x11 , 0x8E , 0xE6 , 0x00 , 0xC0 , 0x0C , 0x20 , 0x53 , 0x65 ] ) ) {
// Sync on Stream-Properties-Object (B7DC0791-A9B7-11CF-8EE6-00C00C205365)
const typeId = node _buffer . Buffer . alloc ( 16 ) ;
payload -= await tokenizer . readBuffer ( typeId ) ;
if ( _check ( typeId , [ 0x40 , 0x9E , 0x69 , 0xF8 , 0x4D , 0x5B , 0xCF , 0x11 , 0xA8 , 0xFD , 0x00 , 0x80 , 0x5F , 0x5C , 0x44 , 0x2B ] ) ) {
// Found audio:
return {
ext : 'asf' ,
mime : 'audio/x-ms-asf' ,
} ;
}
if ( _check ( typeId , [ 0xC0 , 0xEF , 0x19 , 0xBC , 0x4D , 0x5B , 0xCF , 0x11 , 0xA8 , 0xFD , 0x00 , 0x80 , 0x5F , 0x5C , 0x44 , 0x2B ] ) ) {
// Found video:
return {
ext : 'asf' ,
mime : 'video/x-ms-asf' ,
} ;
}
break ;
}
await tokenizer . ignore ( payload ) ;
}
// Default to ASF generic extension
return {
ext : 'asf' ,
mime : 'application/vnd.ms-asf' ,
} ;
}
if ( this . check ( [ 0xAB , 0x4B , 0x54 , 0x58 , 0x20 , 0x31 , 0x31 , 0xBB , 0x0D , 0x0A , 0x1A , 0x0A ] ) ) {
return {
ext : 'ktx' ,
mime : 'image/ktx' ,
} ;
}
if ( ( this . check ( [ 0x7E , 0x10 , 0x04 ] ) || this . check ( [ 0x7E , 0x18 , 0x04 ] ) ) && this . check ( [ 0x30 , 0x4D , 0x49 , 0x45 ] , { offset : 4 } ) ) {
return {
ext : 'mie' ,
mime : 'application/x-mie' ,
} ;
}
if ( this . check ( [ 0x27 , 0x0A , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 ] , { offset : 2 } ) ) {
return {
ext : 'shp' ,
mime : 'application/x-esri-shape' ,
} ;
}
if ( this . check ( [ 0xFF , 0x4F , 0xFF , 0x51 ] ) ) {
return {
ext : 'j2c' ,
mime : 'image/j2c' ,
} ;
}
if ( this . check ( [ 0x00 , 0x00 , 0x00 , 0x0C , 0x6A , 0x50 , 0x20 , 0x20 , 0x0D , 0x0A , 0x87 , 0x0A ] ) ) {
// JPEG-2000 family
await tokenizer . ignore ( 20 ) ;
const type = await tokenizer . readToken ( new StringType ( 4 , 'ascii' ) ) ;
switch ( type ) {
case 'jp2 ' :
return {
ext : 'jp2' ,
mime : 'image/jp2' ,
} ;
case 'jpx ' :
return {
ext : 'jpx' ,
mime : 'image/jpx' ,
} ;
case 'jpm ' :
return {
ext : 'jpm' ,
mime : 'image/jpm' ,
} ;
case 'mjp2' :
return {
ext : 'mj2' ,
mime : 'image/mj2' ,
} ;
default :
return ;
}
}
if (
this . check ( [ 0xFF , 0x0A ] )
|| this . check ( [ 0x00 , 0x00 , 0x00 , 0x0C , 0x4A , 0x58 , 0x4C , 0x20 , 0x0D , 0x0A , 0x87 , 0x0A ] )
) {
return {
ext : 'jxl' ,
mime : 'image/jxl' ,
} ;
}
if ( this . check ( [ 0xFE , 0xFF ] ) ) { // UTF-16-BOM-LE
if ( this . check ( [ 0 , 60 , 0 , 63 , 0 , 120 , 0 , 109 , 0 , 108 ] , { offset : 2 } ) ) {
return {
ext : 'xml' ,
mime : 'application/xml' ,
} ;
}
return undefined ; // Some unknown text based format
}
// -- Unsafe signatures --
if (
this . check ( [ 0x0 , 0x0 , 0x1 , 0xBA ] )
|| this . check ( [ 0x0 , 0x0 , 0x1 , 0xB3 ] )
) {
return {
ext : 'mpg' ,
mime : 'video/mpeg' ,
} ;
}
if ( this . check ( [ 0x00 , 0x01 , 0x00 , 0x00 , 0x00 ] ) ) {
return {
ext : 'ttf' ,
mime : 'font/ttf' ,
} ;
}
if ( this . check ( [ 0x00 , 0x00 , 0x01 , 0x00 ] ) ) {
return {
ext : 'ico' ,
mime : 'image/x-icon' ,
} ;
}
if ( this . check ( [ 0x00 , 0x00 , 0x02 , 0x00 ] ) ) {
return {
ext : 'cur' ,
mime : 'image/x-icon' ,
} ;
}
if ( this . check ( [ 0xD0 , 0xCF , 0x11 , 0xE0 , 0xA1 , 0xB1 , 0x1A , 0xE1 ] ) ) {
// Detected Microsoft Compound File Binary File (MS-CFB) Format.
return {
ext : 'cfb' ,
mime : 'application/x-cfb' ,
} ;
}
// Increase sample size from 12 to 256.
await tokenizer . peekBuffer ( this . buffer , { length : Math . min ( 256 , tokenizer . fileInfo . size ) , mayBeLess : true } ) ;
2026-05-06 17:32:44 +08:00
if ( this . check ( [ 0x61 , 0x63 , 0x73 , 0x70 ] , { offset : 36 } ) ) {
return {
ext : 'icc' ,
mime : 'application/vnd.iccprofile' ,
} ;
}
2025-08-02 12:09:34 +08:00
// -- 15-byte signatures --
if ( this . checkString ( 'BEGIN:' ) ) {
if ( this . checkString ( 'VCARD' , { offset : 6 } ) ) {
return {
ext : 'vcf' ,
mime : 'text/vcard' ,
} ;
}
if ( this . checkString ( 'VCALENDAR' , { offset : 6 } ) ) {
return {
ext : 'ics' ,
mime : 'text/calendar' ,
} ;
}
}
// `raf` is here just to keep all the raw image detectors together.
if ( this . checkString ( 'FUJIFILMCCD-RAW' ) ) {
return {
ext : 'raf' ,
mime : 'image/x-fujifilm-raf' ,
} ;
}
if ( this . checkString ( 'Extended Module:' ) ) {
return {
ext : 'xm' ,
mime : 'audio/x-xm' ,
} ;
}
if ( this . checkString ( 'Creative Voice File' ) ) {
return {
ext : 'voc' ,
mime : 'audio/x-voc' ,
} ;
}
if ( this . check ( [ 0x04 , 0x00 , 0x00 , 0x00 ] ) && this . buffer . length >= 16 ) { // Rough & quick check Pickle/ASAR
const jsonSize = this . buffer . readUInt32LE ( 12 ) ;
if ( jsonSize > 12 && this . buffer . length >= jsonSize + 16 ) {
try {
const header = this . buffer . slice ( 16 , jsonSize + 16 ) . toString ( ) ;
const json = JSON . parse ( header ) ;
// Check if Pickle is ASAR
if ( json . files ) { // Final check, assuring Pickle/ASAR format
return {
ext : 'asar' ,
mime : 'application/x-asar' ,
} ;
}
} catch { }
}
}
if ( this . check ( [ 0x06 , 0x0E , 0x2B , 0x34 , 0x02 , 0x05 , 0x01 , 0x01 , 0x0D , 0x01 , 0x02 , 0x01 , 0x01 , 0x02 ] ) ) {
return {
ext : 'mxf' ,
mime : 'application/mxf' ,
} ;
}
if ( this . checkString ( 'SCRM' , { offset : 44 } ) ) {
return {
ext : 's3m' ,
mime : 'audio/x-s3m' ,
} ;
}
// Raw MPEG-2 transport stream (188-byte packets)
if ( this . check ( [ 0x47 ] ) && this . check ( [ 0x47 ] , { offset : 188 } ) ) {
return {
ext : 'mts' ,
mime : 'video/mp2t' ,
} ;
}
// Blu-ray Disc Audio-Video (BDAV) MPEG-2 transport stream has 4-byte TP_extra_header before each 188-byte packet
if ( this . check ( [ 0x47 ] , { offset : 4 } ) && this . check ( [ 0x47 ] , { offset : 196 } ) ) {
return {
ext : 'mts' ,
mime : 'video/mp2t' ,
} ;
}
if ( this . check ( [ 0x42 , 0x4F , 0x4F , 0x4B , 0x4D , 0x4F , 0x42 , 0x49 ] , { offset : 60 } ) ) {
return {
ext : 'mobi' ,
mime : 'application/x-mobipocket-ebook' ,
} ;
}
if ( this . check ( [ 0x44 , 0x49 , 0x43 , 0x4D ] , { offset : 128 } ) ) {
return {
ext : 'dcm' ,
mime : 'application/dicom' ,
} ;
}
if ( this . check ( [ 0x4C , 0x00 , 0x00 , 0x00 , 0x01 , 0x14 , 0x02 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0xC0 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x46 ] ) ) {
return {
ext : 'lnk' ,
mime : 'application/x.ms.shortcut' , // Invented by us
} ;
}
if ( this . check ( [ 0x62 , 0x6F , 0x6F , 0x6B , 0x00 , 0x00 , 0x00 , 0x00 , 0x6D , 0x61 , 0x72 , 0x6B , 0x00 , 0x00 , 0x00 , 0x00 ] ) ) {
return {
ext : 'alias' ,
mime : 'application/x.apple.alias' , // Invented by us
} ;
}
2026-05-06 17:32:44 +08:00
if ( this . checkString ( 'Kaydara FBX Binary \u0000' ) ) {
return {
ext : 'fbx' ,
mime : 'application/x.autodesk.fbx' , // Invented by us
} ;
}
2025-08-02 12:09:34 +08:00
if (
this . check ( [ 0x4C , 0x50 ] , { offset : 34 } )
&& (
this . check ( [ 0x00 , 0x00 , 0x01 ] , { offset : 8 } )
|| this . check ( [ 0x01 , 0x00 , 0x02 ] , { offset : 8 } )
|| this . check ( [ 0x02 , 0x00 , 0x02 ] , { offset : 8 } )
)
) {
return {
ext : 'eot' ,
mime : 'application/vnd.ms-fontobject' ,
} ;
}
if ( this . check ( [ 0x06 , 0x06 , 0xED , 0xF5 , 0xD8 , 0x1D , 0x46 , 0xE5 , 0xBD , 0x31 , 0xEF , 0xE7 , 0xFE , 0x74 , 0xB7 , 0x1D ] ) ) {
return {
ext : 'indd' ,
mime : 'application/x-indesign' ,
} ;
}
// Increase sample size from 256 to 512
await tokenizer . peekBuffer ( this . buffer , { length : Math . min ( 512 , tokenizer . fileInfo . size ) , mayBeLess : true } ) ;
// Requires a buffer size of 512 bytes
if ( tarHeaderChecksumMatches ( this . buffer ) ) {
return {
ext : 'tar' ,
mime : 'application/x-tar' ,
} ;
}
if ( this . check ( [ 0xFF , 0xFE ] ) ) { // UTF-16-BOM-BE
if ( this . check ( [ 60 , 0 , 63 , 0 , 120 , 0 , 109 , 0 , 108 , 0 ] , { offset : 2 } ) ) {
return {
ext : 'xml' ,
mime : 'application/xml' ,
} ;
}
if ( this . check ( [ 0xFF , 0x0E , 0x53 , 0x00 , 0x6B , 0x00 , 0x65 , 0x00 , 0x74 , 0x00 , 0x63 , 0x00 , 0x68 , 0x00 , 0x55 , 0x00 , 0x70 , 0x00 , 0x20 , 0x00 , 0x4D , 0x00 , 0x6F , 0x00 , 0x64 , 0x00 , 0x65 , 0x00 , 0x6C , 0x00 ] , { offset : 2 } ) ) {
return {
ext : 'skp' ,
mime : 'application/vnd.sketchup.skp' ,
} ;
}
return undefined ; // Some text based format
}
if ( this . checkString ( '-----BEGIN PGP MESSAGE-----' ) ) {
return {
ext : 'pgp' ,
mime : 'application/pgp-encrypted' ,
} ;
}
// Check MPEG 1 or 2 Layer 3 header, or 'layer 0' for ADTS (MPEG sync-word 0xFFE)
if ( this . buffer . length >= 2 && this . check ( [ 0xFF , 0xE0 ] , { offset : 0 , mask : [ 0xFF , 0xE0 ] } ) ) {
if ( this . check ( [ 0x10 ] , { offset : 1 , mask : [ 0x16 ] } ) ) {
// Check for (ADTS) MPEG-2
if ( this . check ( [ 0x08 ] , { offset : 1 , mask : [ 0x08 ] } ) ) {
return {
ext : 'aac' ,
mime : 'audio/aac' ,
} ;
}
// Must be (ADTS) MPEG-4
return {
ext : 'aac' ,
mime : 'audio/aac' ,
} ;
}
// MPEG 1 or 2 Layer 3 header
// Check for MPEG layer 3
if ( this . check ( [ 0x02 ] , { offset : 1 , mask : [ 0x06 ] } ) ) {
return {
ext : 'mp3' ,
mime : 'audio/mpeg' ,
} ;
}
// Check for MPEG layer 2
if ( this . check ( [ 0x04 ] , { offset : 1 , mask : [ 0x06 ] } ) ) {
return {
ext : 'mp2' ,
mime : 'audio/mpeg' ,
} ;
}
// Check for MPEG layer 1
if ( this . check ( [ 0x06 ] , { offset : 1 , mask : [ 0x06 ] } ) ) {
return {
ext : 'mp1' ,
mime : 'audio/mpeg' ,
} ;
}
}
}
async readTiffTag ( bigEndian ) {
const tagId = await this . tokenizer . readToken ( bigEndian ? UINT16 _BE : UINT16 _LE ) ;
this . tokenizer . ignore ( 10 ) ;
switch ( tagId ) {
case 50_341 :
return {
ext : 'arw' ,
mime : 'image/x-sony-arw' ,
} ;
case 50_706 :
return {
ext : 'dng' ,
mime : 'image/x-adobe-dng' ,
} ;
}
}
async readTiffIFD ( bigEndian ) {
const numberOfTags = await this . tokenizer . readToken ( bigEndian ? UINT16 _BE : UINT16 _LE ) ;
for ( let n = 0 ; n < numberOfTags ; ++ n ) {
const fileType = await this . readTiffTag ( bigEndian ) ;
if ( fileType ) {
return fileType ;
}
}
}
async readTiffHeader ( bigEndian ) {
const version = ( bigEndian ? UINT16 _BE : UINT16 _LE ) . get ( this . buffer , 2 ) ;
const ifdOffset = ( bigEndian ? UINT32 _BE : UINT32 _LE ) . get ( this . buffer , 4 ) ;
if ( version === 42 ) {
// TIFF file header
if ( ifdOffset >= 6 ) {
if ( this . checkString ( 'CR' , { offset : 8 } ) ) {
return {
ext : 'cr2' ,
mime : 'image/x-canon-cr2' ,
} ;
}
if ( ifdOffset >= 8 && ( this . check ( [ 0x1C , 0x00 , 0xFE , 0x00 ] , { offset : 8 } ) || this . check ( [ 0x1F , 0x00 , 0x0B , 0x00 ] , { offset : 8 } ) ) ) {
return {
ext : 'nef' ,
mime : 'image/x-nikon-nef' ,
} ;
}
}
await this . tokenizer . ignore ( ifdOffset ) ;
const fileType = await this . readTiffIFD ( bigEndian ) ;
return fileType ? ? {
ext : 'tif' ,
mime : 'image/tiff' ,
} ;
}
if ( version === 43 ) { // Big TIFF file header
return {
ext : 'tif' ,
mime : 'image/tiff' ,
} ;
}
}
}
2026-05-06 17:32:44 +08:00
new Set ( extensions ) ;
new Set ( mimeTypes ) ;
2025-08-02 12:09:34 +08:00
2026-05-06 17:32:44 +08:00
// File extensions accepted as images by imageType(); detection results whose
// extension is not in this set are rejected.
const imageExtensions = new Set(
    ('jpg png gif webp flif cr2 tif bmp jxr psd ico bpg '
        + 'jp2 jpm jpx heic cur dcm avif').split(' '),
);
2025-08-02 12:09:34 +08:00
2026-05-06 17:32:44 +08:00
/**
 * Detect whether a buffer contains a known image format.
 * Resolves with the {ext, mime} detection result when the detected extension
 * is in imageExtensions, and false otherwise (including undetected input).
 */
async function imageType(input) {
    const detected = await fileTypeFromBuffer(input);
    if (detected && imageExtensions.has(detected.ext)) {
        return detected;
    }
    return false;
}
2025-08-02 12:09:34 +08:00
// العربية
var ar = { } ;
// čeština
var cz = { } ;
// Dansk
var da = { } ;
// Deutsch
var de = { } ;
// English — the base locale table; t() falls back to these strings.
// NOTE: keys are lookup ids shared by every locale table and by the t(...)
// call sites; they must stay byte-identical (including the historical typos
// "changedd"/"reciver" inside keys). Only the VALUES below have been cleaned up.
var en = {
    // setting.ts
    "Plugin Settings": "Plugin Settings",
    "Auto pasted upload": "Auto pasted upload",
    "If you set this value true, when you paste image, it will be auto uploaded(you should set the picGo server rightly)": "If you set this value true, when you paste image, it will be auto uploaded(you should set the picGo server rightly)",
    "Default uploader": "Default uploader",
    "PicGo server": "PicGo server upload route",
    "PicGo server desc": "upload route, use PicList will be able to set picbed and config through query",
    "Please input PicGo server": "Please input upload route",
    "PicGo delete server": "PicGo server delete route(you need to use PicList app)",
    "PicList desc": "Search PicList on Github to download and install",
    "Please input PicGo delete server": "Please input delete server",
    "Delete image using PicList": "Delete image using PicList",
    "PicGo-Core path": "PicGo-Core path",
    "Delete successfully": "Delete successfully",
    "Delete failed": "Delete failed",
    "Image size suffix": "Image size suffix",
    "Image size suffix Description": "like |300 for resize image in ob.",
    "Please input image size suffix": "Please input image size suffix",
    "Error, could not delete": "Error, could not delete",
    "Please input PicGo-Core path, default using environment variables": "Please input PicGo-Core path, default using environment variables",
    "Work on network": "Work on network",
    "Work on network Description": "Allow upload network image by 'Upload all' command.\n Or when you paste, md standard image link in your clipboard will be auto upload.",
    fixPath: "fixPath",
    fixPathWarning: "This option is used to fix PicGo-core upload failures on Linux and Mac. It modifies the PATH variable within Obsidian. If Obsidian encounters any bugs, turn off the option, try again!",
    "Upload when clipboard has image and text together": "Upload when clipboard has image and text together",
    "When you copy, some application like Excel will image and text to clipboard, you can upload or not.": "When you copy, some applications such as Excel put both an image and text on the clipboard; choose whether the image should be uploaded.",
    "Network Domain Black List": "Network Domain Black List",
    "Network Domain Black List Description": "Images from the listed domains will not be uploaded; separate domains with commas.",
    "Delete source file after you upload file": "Delete source file after you upload file",
    "Delete source file in ob assets after you upload file.": "Delete source file in ob assets after you upload file.",
    "Image desc": "Image desc",
    reserve: "default",
    "remove all": "none",
    "remove default": "remove image.png",
    "Remote server mode": "Remote server mode",
    "Remote server mode desc": "If you have deployed piclist-core or piclist on the server.",
    "Can not find image file": "Can not find image file",
    "File has been changedd, upload failure": "File has been changed, upload failure",
    "File has been changedd, download failure": "File has been changed, download failure",
    "Warning: upload files is different of reciver files from api": "Warning: the number of uploaded files differs from the number of files received from the API",
};
// British English
var enGB = { } ;
// Español
var es = { } ;
// français
var fr = { } ;
// हिन्दी
var hi = { } ;
// Bahasa Indonesia
var id = { } ;
// Italiano
var it = { } ;
// 日本語
var ja = { } ;
// 한국어
var ko = { } ;
// Nederlands
var nl = { } ;
// Norsk
var no = { } ;
// język polski
var pl = { } ;
// Português
var pt = { } ;
// Português do Brasil
// Brazilian Portuguese
var ptBR = { } ;
// Română
var ro = { } ;
// русский
var ru = { } ;
// Türkçe
var tr = { } ;
// 简体中文
var zhCN = {
// setting.ts
"Plugin Settings" : "插件设置" ,
"Auto pasted upload" : "剪切板自动上传" ,
"If you set this value true, when you paste image, it will be auto uploaded(you should set the picGo server rightly)" : "启用该选项后, 黏贴图片时会自动上传( 你需要正确配置picgo) " ,
"Default uploader" : "默认上传器" ,
"PicGo server" : "PicGo server 上传接口" ,
"PicGo server desc" : "上传接口, 使用PicList时可通过设置URL参数指定图床和配置" ,
"Please input PicGo server" : "请输入上传接口地址" ,
"PicGo delete server" : "PicGo server 删除接口(请使用PicList来启用此功能)" ,
"PicList desc" : "PicList是PicGo二次开发版, 请Github搜索PicList下载" ,
"Please input PicGo delete server" : "请输入删除接口地址" ,
"Delete image using PicList" : "使用 PicList 删除图片" ,
"PicGo-Core path" : "PicGo-Core 路径" ,
"Delete successfully" : "删除成功" ,
"Delete failed" : "删除失败" ,
"Error, could not delete" : "错误,无法删除" ,
"Image size suffix" : "图片大小后缀" ,
"Image size suffix Description" : "比如:|300 用于调整图片大小" ,
"Please input image size suffix" : "请输入图片大小后缀" ,
"Please input PicGo-Core path, default using environment variables" : "请输入 PicGo-Core path, 默认使用环境变量" ,
"Work on network" : "应用网络图片" ,
"Work on network Description" : "当你上传所有图片时,也会上传网络图片。以及当你进行黏贴时,剪切板中的标准 md 图片会被上传" ,
fixPath : "修正PATH变量" ,
fixPathWarning : "此选项用于修复Linux和Mac上 PicGo-Core 上传失败的问题。它会修改 Obsidian 内的 PATH 变量,如果 Obsidian 遇到任何BUG, 先关闭这个选项试试! " ,
"Upload when clipboard has image and text together" : "当剪切板同时拥有文本和图片剪切板数据时是否上传图片" ,
"When you copy, some application like Excel will image and text to clipboard, you can upload or not." : "当你复制时,某些应用例如 Excel 会在剪切板同时文本和图像数据,确认是否上传。" ,
"Network Domain Black List" : "网络图片域名黑名单" ,
"Network Domain Black List Description" : "黑名单域名中的图片将不会被上传,用英文逗号分割" ,
"Delete source file after you upload file" : "上传文件后移除源文件" ,
"Delete source file in ob assets after you upload file." : "上传文件后移除在ob附件文件夹中的文件" ,
"Image desc" : "图片描述" ,
reserve : "默认" ,
"remove all" : "无" ,
"remove default" : "移除image.png" ,
"Remote server mode" : "远程服务器模式" ,
"Remote server mode desc" : "如果你在服务器部署了piclist-core或者piclist" ,
"Can not find image file" : "没有解析到图像文件" ,
"File has been changedd, upload failure" : "当前文件已变更,上传失败" ,
"File has been changedd, download failure" : "当前文件已变更,下载失败" ,
"Warning: upload files is different of reciver files from api" : "警告:上传的文件与接口返回的文件数量不一致" ,
} ;
// 繁體中文
var zhTW = { } ;
2026-05-06 17:32:44 +08:00
// Maps moment/Obsidian locale codes to translation tables.
// Aliases: "cs" (Czech) reuses the `cz` table, "nn" (Norwegian Nynorsk)
// reuses `no`; hyphenated codes need quoted keys.
const localeMap = {
    ar,
    cs: cz,
    da,
    de,
    en,
    'en-gb': enGB,
    es,
    fr,
    hi,
    id,
    it,
    ja,
    ko,
    nl,
    nn: no,
    pl,
    pt,
    'pt-br': ptBR,
    ro,
    ru,
    tr,
    'zh-cn': zhCN,
    'zh-tw': zhTW,
};
2026-05-06 17:32:44 +08:00
// Dictionary for Obsidian's current display language (may be undefined for
// locales missing from localeMap).
const locale = localeMap[obsidian.moment.locale()];

/**
 * Translate a UI string key. Falls back to the English table whenever the
 * active locale is missing or has no (non-empty) translation for the key.
 */
function t(str) {
    const translated = locale ? locale[str] : undefined;
    return translated || en[str];
}
2026-05-06 17:32:44 +08:00
/**
 * Download every remote (http/https) image referenced in the active note into
 * the vault's attachment folder, then rewrite the note so those links point at
 * the local copies. Shows a Notice with the success/failure tally.
 * @param {object} plugin - plugin instance (provides app, helper, handleName).
 */
async function downloadAllImageFiles(plugin) {
    const activeFile = plugin.app.workspace.getActiveFile();
    const folderPath = await plugin.app.fileManager.getAvailablePathForAttachment("");
    const fileArray = plugin.helper.getAllFiles();
    if (!(await plugin.app.vault.adapter.exists(folderPath))) {
        await plugin.app.vault.adapter.mkdir(folderPath);
    }
    const imageArray = [];
    for (const file of fileArray) {
        // Only remote images are downloaded; local links are left untouched.
        if (!file.path.startsWith("http")) {
            continue;
        }
        const url = file.path;
        const asset = getUrlAsset(url);
        // Strip characters that are illegal in file names.
        const name = decodeURI(pathBrowserify.parse(asset).name).replaceAll(/[\\\\/:*?\"<>|]/g, "-");
        const response = await download(plugin, url, folderPath, name);
        if (response.ok) {
            const activeFolder = plugin.app.workspace.getActiveFile().parent.path;
            imageArray.push({
                source: file.source,
                name: name,
                path: obsidian.normalizePath(pathBrowserify.relative(obsidian.normalizePath(activeFolder), obsidian.normalizePath(response.path))),
            });
        }
    }
    let value = plugin.helper.getValue();
    // for...of instead of the original side-effecting .map().
    for (const image of imageArray) {
        const name = plugin.handleName(image.name);
        // NOTE(review): the replacement literal was corrupted in this bundle;
        // reconstructed as a standard markdown image link — confirm against
        // the plugin's source.
        value = value.replace(image.source, `![${name}](${image.path})`);
    }
    const currentFile = plugin.app.workspace.getActiveFile();
    // Abort the rewrite if the user switched notes while downloads were running.
    if (activeFile.path !== currentFile.path) {
        new obsidian.Notice(t("File has been changedd, download failure"));
        return;
    }
    plugin.helper.setValue(value);
    new obsidian.Notice(`all: ${fileArray.length}\nsuccess: ${imageArray.length}\nfailed: ${fileArray.length - imageArray.length}`);
}
/**
 * Fetch one image over HTTP and write it into folderPath as `name.<ext>`,
 * where the extension comes from content sniffing (imageType).
 * @returns {{ok: boolean, msg: string|Error, path?: string, type?: object}}
 *   ok is false on non-200 responses, non-image payloads, or write failures.
 */
async function download(plugin, url, folderPath, name) {
    const response = await obsidian.requestUrl({ url });
    if (response.status !== 200) {
        return {
            ok: false,
            msg: "error",
        };
    }
    // Sniff the actual content; rejects anything that is not a known image type.
    const type = await imageType(new Uint8Array(response.arrayBuffer));
    if (!type) {
        return {
            ok: false,
            msg: "error",
        };
    }
    try {
        let path = obsidian.normalizePath(pathBrowserify.join(folderPath, `${name}.${type.ext}`));
        // If the file name is taken, fall back to a random name
        // (the existing file's extension is not compared).
        if (await plugin.app.vault.adapter.exists(path)) {
            path = obsidian.normalizePath(pathBrowserify.join(folderPath, `${uuid()}.${type.ext}`));
        }
        // Fix: await the write so failures are reported via the catch below
        // instead of becoming an unhandled rejection after `ok: true` returns.
        await plugin.app.vault.adapter.writeBinary(path, response.arrayBuffer);
        return {
            ok: true,
            msg: "ok",
            path: path,
            type,
        };
    }
    catch (err) {
        return {
            ok: false,
            msg: err,
        };
    }
}
/**
 * Uploads images through a PicGo/PicList HTTP server.
 * In remote-server mode the file contents are posted as multipart form data;
 * otherwise the local PicGo app is sent the file paths and reads them itself.
 */
class PicGoUploader {
    settings;
    plugin;
    constructor(settings, plugin) {
        this.settings = settings;
        this.plugin = plugin;
    }
    /**
     * Upload a list of local file paths.
     * @returns the server's JSON payload (PicGo/PicList response shape).
     */
    async uploadFiles(fileList) {
        let response;
        let data;
        if (this.settings.remoteServerMode) {
            // Remote server cannot read our paths — send the bytes themselves.
            const files = [];
            for (let i = 0; i < fileList.length; i++) {
                const file = fileList[i];
                const buffer = await new Promise((resolve, reject) => {
                    require$$0.readFile(file, (err, data) => {
                        if (err) {
                            reject(err);
                        }
                        resolve(data);
                    });
                });
                const arrayBuffer = bufferToArrayBuffer(buffer);
                files.push(new File([arrayBuffer], file));
            }
            response = await this.uploadFileByData(files);
            data = await response.json();
        }
        else {
            response = await obsidian.requestUrl({
                url: this.settings.uploadServer,
                method: "POST",
                headers: { "Content-Type": "application/json" },
                body: JSON.stringify({ list: fileList }),
            });
            // requestUrl exposes `json` as a property, not a method.
            data = await response.json;
        }
        // PicList includes fullResult; remember it so images can be deleted later.
        if (data.fullResult) {
            const uploadUrlFullResultList = data.fullResult || [];
            this.settings.uploadedImages = [
                ...(this.settings.uploadedImages || []),
                ...uploadUrlFullResultList,
            ];
            // Fix: persist the recorded uploads, mirroring uploadFileByClipboard
            // (previously the list was lost on reload).
            this.plugin.saveSettings();
        }
        return data;
    }
    // POST File objects to the upload server as multipart form data.
    async uploadFileByData(fileList) {
        const form = new FormData();
        for (let i = 0; i < fileList.length; i++) {
            form.append("list", fileList[i]);
        }
        const options = {
            method: "post",
            body: form,
        };
        const response = await fetch(this.settings.uploadServer, options);
        return response;
    }
    /**
     * Upload the clipboard content. In non-remote mode an empty POST asks the
     * PicGo app to read its own clipboard.
     * @returns {{code: number, msg: string, data: string}} code 0 on success.
     */
    async uploadFileByClipboard(fileList) {
        let data;
        let res;
        if (this.settings.remoteServerMode) {
            res = await this.uploadFileByData(fileList);
            data = await res.json();
        }
        else {
            res = await obsidian.requestUrl({
                url: this.settings.uploadServer,
                method: "POST",
            });
            data = await res.json;
        }
        if (res.status !== 200) {
            return {
                code: -1,
                msg: data.msg,
                data: "",
            };
        }
        // PicList includes fullResult; remember it so images can be deleted later.
        if (data.fullResult) {
            const uploadUrlFullResultList = data.fullResult || [];
            this.settings.uploadedImages = [
                ...(this.settings.uploadedImages || []),
                ...uploadUrlFullResultList,
            ];
            this.plugin.saveSettings();
        }
        return {
            code: 0,
            msg: "success",
            data: typeof data.result == "string" ? data.result : data.result[0],
        };
    }
}
class PicGoCoreUploader {
settings ;
plugin ;
constructor ( settings , plugin ) {
this . settings = settings ;
this . plugin = plugin ;
}
async uploadFiles ( fileList ) {
const length = fileList . length ;
let cli = this . settings . picgoCorePath || "picgo" ;
let command = ` ${ cli } upload ${ fileList
. map ( item => ` " ${ item } " ` )
. join ( " " ) } ` ;
const res = await this . exec ( command ) ;
const splitList = res . split ( "\n" ) ;
const splitListLength = splitList . length ;
const data = splitList . splice ( splitListLength - 1 - length , length ) ;
if ( res . includes ( "PicGo ERROR" ) ) {
console . log ( command , res ) ;
return {
success : false ,
msg : "失败" ,
} ;
}
else {
return {
success : true ,
result : data ,
} ;
}
// {success:true,result:[]}
}
// PicGo-Core 上传处理
async uploadFileByClipboard ( ) {
const res = await this . uploadByClip ( ) ;
const splitList = res . split ( "\n" ) ;
const lastImage = getLastImage ( splitList ) ;
if ( lastImage ) {
return {
code : 0 ,
msg : "success" ,
data : lastImage ,
} ;
}
else {
console . log ( splitList ) ;
// new Notice(`"Please check PicGo-Core config"\n${res}`);
return {
code : - 1 ,
msg : ` "Please check PicGo-Core config" \n ${ res } ` ,
data : "" ,
} ;
}
}
// PicGo-Core的剪切上传反馈
async uploadByClip ( ) {
let command ;
if ( this . settings . picgoCorePath ) {
command = ` ${ this . settings . picgoCorePath } upload ` ;
}
else {
command = ` picgo upload ` ;
}
const res = await this . exec ( command ) ;
// const res = await this.spawnChild();
return res ;
}
async exec ( command ) {
let { stdout } = await require$$0$2 . exec ( command ) ;
const res = await streamToString ( stdout ) ;
return res ;
}
async spawnChild ( ) {
const { spawn } = require ( "child_process" ) ;
const child = spawn ( "picgo" , [ "upload" ] , {
shell : true ,
} ) ;
let data = "" ;
for await ( const chunk of child . stdout ) {
data += chunk ;
}
let error = "" ;
for await ( const chunk of child . stderr ) {
error += chunk ;
}
const exitCode = await new Promise ( ( resolve , reject ) => {
child . on ( "close" , resolve ) ;
} ) ;
if ( exitCode ) {
throw new Error ( ` subprocess error exit ${ exitCode } , ${ error } ` ) ;
}
return data ;
}
}
/**
 * Deletes previously uploaded images through the PicList delete endpoint.
 */
class PicGoDeleter {
    plugin;
    constructor(plugin) {
        this.plugin = plugin;
    }
    /**
     * POST the image config entries to the configured delete server.
     * @returns the server's JSON response.
     */
    async deleteImage(configMap) {
        const requestBody = JSON.stringify({
            list: configMap,
        });
        const response = await obsidian.requestUrl({
            url: this.plugin.settings.deleteServer,
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: requestBody,
        });
        return response.json;
    }
}
// Markdown image links, three alternatives (6 capture groups):
//   1/2: ![name](<path.ext>)           — angle-bracketed local path, ext required
//   3/4: ![name](path.ext "title")     — plain local path, ext required, optional title
//   5/6: ![name](http(s)://…)          — remote URL, no extension required
const REGEX_FILE = /\!\[(.*?)\]\(<(\S+\.\w+)>\)|\!\[(.*?)\]\((\S+\.\w+)(?:\s+"[^"]*")?\)|\!\[(.*?)\]\((https?:\/\/.*?)\)/g;
// Obsidian wiki-style embeds: ![[target]] or ![[target|size]] (group 2 keeps the |size part).
const REGEX_WIKI_FILE = /\!\[\[(.*?)(\s*?\|.*?)?\]\]/g;
/**
 * Editor/metadata helpers for the active note: frontmatter lookup, whole-
 * document get/set, and image-link extraction.
 */
class Helper {
    app;
    constructor(app) {
        this.app = app;
    }
    /**
     * Read a frontmatter value from the active file's metadata cache.
     * Returns defaultValue when the key is absent, undefined when no file is active.
     */
    getFrontmatterValue(key, defaultValue = undefined) {
        const file = this.app.workspace.getActiveFile();
        if (!file) {
            return undefined;
        }
        const path = file.path;
        const cache = this.app.metadataCache.getCache(path);
        let value = defaultValue;
        // Object.hasOwn avoids the hasOwnProperty-shadowing pitfall.
        if (cache?.frontmatter && Object.hasOwn(cache.frontmatter, key)) {
            value = cache.frontmatter[key];
        }
        return value;
    }
    // The active markdown editor, or null when none is focused.
    getEditor() {
        const mdView = this.app.workspace.getActiveViewOfType(obsidian.MarkdownView);
        if (mdView) {
            return mdView.editor;
        }
        else {
            return null;
        }
    }
    // NOTE(review): assumes a markdown editor is active; throws otherwise.
    getValue() {
        const editor = this.getEditor();
        return editor.getValue();
    }
    // Replace the whole document while restoring scroll position and cursor.
    setValue(value) {
        const editor = this.getEditor();
        const { left, top } = editor.getScrollInfo();
        const position = editor.getCursor();
        editor.setValue(value);
        editor.scrollTo(left, top);
        editor.setCursor(position);
    }
    // Get all image links (local and remote) in the active document.
    getAllFiles() {
        const editor = this.getEditor();
        const value = editor.getValue();
        return this.getImageLink(value);
    }
    /**
     * Extract image links from markdown text.
     * @returns {{path: string, name: string, source: string}[]} entries for
     *   both standard markdown images and Obsidian wiki embeds.
     */
    getImageLink(value) {
        const matches = value.matchAll(REGEX_FILE);
        const WikiMatches = value.matchAll(REGEX_WIKI_FILE);
        const fileArray = [];
        for (const match of matches) {
            const source = match[0];
            // Groups 1/2 = <bracketed> form, 3/4 = plain form, 5/6 = http form.
            let name = match[1];
            let path = match[2];
            if (name === undefined) {
                // Fix: also fall back to group 5 — pure-http matches previously
                // produced undefined name/path (crashing path.startsWith later).
                name = match[3] ?? match[5];
            }
            if (path === undefined) {
                path = match[4] ?? match[6];
            }
            fileArray.push({
                path: path,
                name: name,
                source: source,
            });
        }
        for (const match of WikiMatches) {
            let name = pathBrowserify.parse(match[1]).name;
            const path = match[1];
            const source = match[0];
            // Preserve the |size suffix (e.g. ![[img.png|300]]) in the name.
            if (match[2]) {
                name = `${name}${match[2]}`;
            }
            fileArray.push({
                path: path,
                name: name,
                source: source,
            });
        }
        return fileArray;
    }
    /**
     * True when src's hostname contains any of the comma-separated domains
     * in blackDomains; an empty/blank list never matches.
     */
    hasBlackDomain(src, blackDomains) {
        if (blackDomains.trim() === "") {
            return false;
        }
        const blackDomainList = blackDomains.split(",").filter(item => item !== "");
        const url = new URL(src);
        const domain = url.hostname;
        return blackDomainList.some(blackDomain => domain.includes(blackDomain));
    }
}
const DEFAULT _SETTINGS = {
2025-08-02 12:09:34 +08:00
uploadByClipSwitch : true ,
uploader : "PicGo" ,
uploadServer : "http://127.0.0.1:36677/upload" ,
deleteServer : "http://127.0.0.1:36677/delete" ,
imageSizeSuffix : "" ,
picgoCorePath : "" ,
workOnNetWork : false ,
fixPath : false ,
applyImage : true ,
newWorkBlackDomains : "" ,
deleteSource : false ,
imageDesc : "origin" ,
remoteServerMode : false ,
} ;
2026-05-06 17:32:44 +08:00
class SettingTab extends obsidian . PluginSettingTab {
plugin ;
constructor ( app , plugin ) {
super ( app , plugin ) ;
this . plugin = plugin ;
}
display ( ) {
let { containerEl } = this ;
const os = getOS ( ) ;
2025-08-02 12:09:34 +08:00
containerEl . empty ( ) ;
containerEl . createEl ( "h2" , { text : t ( "Plugin Settings" ) } ) ;
new obsidian . Setting ( containerEl )
. setName ( t ( "Auto pasted upload" ) )
. setDesc ( t ( "If you set this value true, when you paste image, it will be auto uploaded(you should set the picGo server rightly)" ) )
2026-05-06 17:32:44 +08:00
. addToggle ( toggle => toggle
. setValue ( this . plugin . settings . uploadByClipSwitch )
. onChange ( async ( value ) => {
this . plugin . settings . uploadByClipSwitch = value ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
2025-08-02 12:09:34 +08:00
new obsidian . Setting ( containerEl )
. setName ( t ( "Default uploader" ) )
. setDesc ( t ( "Default uploader" ) )
2026-05-06 17:32:44 +08:00
. addDropdown ( cb => cb
. addOption ( "PicGo" , "PicGo(app)" )
. addOption ( "PicGo-Core" , "PicGo-Core" )
. setValue ( this . plugin . settings . uploader )
. onChange ( async ( value ) => {
this . plugin . settings . uploader = value ;
this . display ( ) ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
2025-08-02 12:09:34 +08:00
if ( this . plugin . settings . uploader === "PicGo" ) {
new obsidian . Setting ( containerEl )
. setName ( t ( "PicGo server" ) )
. setDesc ( t ( "PicGo server desc" ) )
2026-05-06 17:32:44 +08:00
. addText ( text => text
. setPlaceholder ( t ( "Please input PicGo server" ) )
. setValue ( this . plugin . settings . uploadServer )
. onChange ( async ( key ) => {
this . plugin . settings . uploadServer = key ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
2025-08-02 12:09:34 +08:00
new obsidian . Setting ( containerEl )
. setName ( t ( "PicGo delete server" ) )
. setDesc ( t ( "PicList desc" ) )
2026-05-06 17:32:44 +08:00
. addText ( text => text
. setPlaceholder ( t ( "Please input PicGo delete server" ) )
. setValue ( this . plugin . settings . deleteServer )
. onChange ( async ( key ) => {
this . plugin . settings . deleteServer = key ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
2025-08-02 12:09:34 +08:00
}
new obsidian . Setting ( containerEl )
. setName ( t ( "Remote server mode" ) )
. setDesc ( t ( "Remote server mode desc" ) )
2026-05-06 17:32:44 +08:00
. addToggle ( toggle => toggle
. setValue ( this . plugin . settings . remoteServerMode )
. onChange ( async ( value ) => {
this . plugin . settings . remoteServerMode = value ;
if ( value ) {
this . plugin . settings . workOnNetWork = false ;
}
this . display ( ) ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
2025-08-02 12:09:34 +08:00
if ( this . plugin . settings . uploader === "PicGo-Core" ) {
new obsidian . Setting ( containerEl )
. setName ( t ( "PicGo-Core path" ) )
. setDesc ( t ( "Please input PicGo-Core path, default using environment variables" ) )
2026-05-06 17:32:44 +08:00
. addText ( text => text
. setPlaceholder ( "" )
. setValue ( this . plugin . settings . picgoCorePath )
. onChange ( async ( value ) => {
this . plugin . settings . picgoCorePath = value ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
2025-08-02 12:09:34 +08:00
if ( os !== "Windows" ) {
new obsidian . Setting ( containerEl )
. setName ( t ( "fixPath" ) )
. setDesc ( t ( "fixPathWarning" ) )
2026-05-06 17:32:44 +08:00
. addToggle ( toggle => toggle
. setValue ( this . plugin . settings . fixPath )
. onChange ( async ( value ) => {
this . plugin . settings . fixPath = value ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
2025-08-02 12:09:34 +08:00
}
}
// image desc setting
new obsidian . Setting ( containerEl )
. setName ( t ( "Image desc" ) )
. setDesc ( t ( "Image desc" ) )
// 2026-05-06 17:32:44 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
. addDropdown ( cb => cb
. addOption ( "origin" , t ( "reserve" ) ) // 保留全部
. addOption ( "none" , t ( "remove all" ) ) // 移除全部
. addOption ( "removeDefault" , t ( "remove default" ) ) // 只移除默认即 image.png
. setValue ( this . plugin . settings . imageDesc )
. onChange ( async ( value ) => {
this . plugin . settings . imageDesc = value ;
this . display ( ) ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
// 2025-08-02 12:09:34 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
new obsidian . Setting ( containerEl )
. setName ( t ( "Image size suffix" ) )
. setDesc ( t ( "Image size suffix Description" ) )
// 2026-05-06 17:32:44 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
. addText ( text => text
. setPlaceholder ( t ( "Please input image size suffix" ) )
. setValue ( this . plugin . settings . imageSizeSuffix )
. onChange ( async ( key ) => {
this . plugin . settings . imageSizeSuffix = key ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
// 2025-08-02 12:09:34 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
new obsidian . Setting ( containerEl )
. setName ( t ( "Work on network" ) )
. setDesc ( t ( "Work on network Description" ) )
// 2026-05-06 17:32:44 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
. addToggle ( toggle => toggle
. setValue ( this . plugin . settings . workOnNetWork )
. onChange ( async ( value ) => {
if ( this . plugin . settings . remoteServerMode ) {
new obsidian . Notice ( "Can only work when remote server mode is off." ) ;
this . plugin . settings . workOnNetWork = false ;
}
else {
this . plugin . settings . workOnNetWork = value ;
}
this . display ( ) ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
// 2025-08-02 12:09:34 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
new obsidian . Setting ( containerEl )
. setName ( t ( "Network Domain Black List" ) )
. setDesc ( t ( "Network Domain Black List Description" ) )
// 2026-05-06 17:32:44 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
. addTextArea ( textArea => textArea
. setValue ( this . plugin . settings . newWorkBlackDomains )
. onChange ( async ( value ) => {
this . plugin . settings . newWorkBlackDomains = value ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
// 2025-08-02 12:09:34 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
new obsidian . Setting ( containerEl )
. setName ( t ( "Upload when clipboard has image and text together" ) )
. setDesc ( t ( "When you copy, some application like Excel will image and text to clipboard, you can upload or not." ) )
// 2026-05-06 17:32:44 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
. addToggle ( toggle => toggle
. setValue ( this . plugin . settings . applyImage )
. onChange ( async ( value ) => {
this . plugin . settings . applyImage = value ;
this . display ( ) ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
// 2025-08-02 12:09:34 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
new obsidian . Setting ( containerEl )
. setName ( t ( "Delete source file after you upload file" ) )
. setDesc ( t ( "Delete source file in ob assets after you upload file." ) )
// 2026-05-06 17:32:44 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
. addToggle ( toggle => toggle
. setValue ( this . plugin . settings . deleteSource )
. onChange ( async ( value ) => {
this . plugin . settings . deleteSource = value ;
this . display ( ) ;
await this . plugin . saveSettings ( ) ;
} ) ) ;
}
}
/**
 * Plugin entry point: uploads images (local attachments, clipboard pastes,
 * drag-and-drop, remote URLs) to an image host through PicGo / PicList or the
 * PicGo-Core CLI, then rewrites the markdown image links in the active note.
 */
class imageAutoUploadPlugin extends obsidian.Plugin {
    settings;          // merged DEFAULT_SETTINGS + persisted data (see loadSettings)
    helper;            // Helper instance for editor/markdown access
    editor;
    picGoUploader;     // uploader backed by a running PicGo/PicList HTTP server
    picGoDeleter;      // remote-delete client (PicList only)
    picGoCoreUploader; // uploader backed by the PicGo-Core command line tool
    uploader;          // the active uploader, chosen in onload() from settings.uploader
    /**
     * Load persisted settings layered over the defaults.
     * FIX: the original passed DEFAULT_SETTINGS as the Object.assign target,
     * mutating the shared defaults object; merge into a fresh object instead.
     */
    async loadSettings() {
        this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData());
    }
    /** Persist the current settings object. */
    async saveSettings() {
        await this.saveData(this.settings);
    }
    onunload() { }
    async onload() {
        await this.loadSettings();
        this.helper = new Helper(this.app);
        this.picGoUploader = new PicGoUploader(this.settings, this);
        this.picGoDeleter = new PicGoDeleter(this);
        this.picGoCoreUploader = new PicGoCoreUploader(this.settings, this);
        // Select the active uploader backend.
        if (this.settings.uploader === "PicGo") {
            this.uploader = this.picGoUploader;
        }
        else if (this.settings.uploader === "PicGo-Core") {
            this.uploader = this.picGoCoreUploader;
            if (this.settings.fixPath) {
                // GUI apps on macOS/Linux may not inherit the shell PATH;
                // repair it so the PicGo-Core binary can be located.
                fixPath();
            }
        }
        else {
            new obsidian.Notice("unknown uploader");
        }
        obsidian.addIcon("upload", `<svg t="1636630783429" class="icon" viewBox="0 0 100 100" version="1.1" p-id="4649" xmlns="http://www.w3.org/2000/svg">
    <path d="M 71.638 35.336 L 79.408 35.336 C 83.7 35.336 87.178 38.662 87.178 42.765 L 87.178 84.864 C 87.178 88.969 83.7 92.295 79.408 92.295 L 17.249 92.295 C 12.957 92.295 9.479 88.969 9.479 84.864 L 9.479 42.765 C 9.479 38.662 12.957 35.336 17.249 35.336 L 25.019 35.336 L 25.019 42.765 L 17.249 42.765 L 17.249 84.864 L 79.408 84.864 L 79.408 42.765 L 71.638 42.765 L 71.638 35.336 Z M 49.014 10.179 L 67.326 27.688 L 61.835 32.942 L 52.849 24.352 L 52.849 59.731 L 45.078 59.731 L 45.078 24.455 L 36.194 32.947 L 30.702 27.692 L 49.012 10.181 Z" p-id="4650" fill="#8a8a8a"></path>
</svg>`);
        this.addSettingTab(new SettingTab(this.app, this));
        this.addCommand({
            id: "Upload all images",
            name: "Upload all images",
            checkCallback: (checking) => {
                let leaf = this.app.workspace.activeLeaf;
                if (leaf) {
                    if (!checking) {
                        this.uploadAllFile();
                    }
                    return true;
                }
                return false;
            },
        });
        this.addCommand({
            id: "Download all images",
            name: "Download all images",
            checkCallback: (checking) => {
                let leaf = this.app.workspace.activeLeaf;
                if (leaf) {
                    if (!checking) {
                        downloadAllImageFiles(this);
                    }
                    return true;
                }
                return false;
            },
        });
        this.setupPasteHandler();
        this.registerFileMenu();
        this.registerSelection();
    }
    /**
     * When the selection is a markdown image link whose URL is one we uploaded,
     * offer a "delete remote image" context-menu entry.
     */
    registerSelection() {
        this.registerEvent(this.app.workspace.on("editor-menu", (menu, editor, info) => {
            if (this.app.workspace.getLeavesOfType("markdown").length === 0) {
                return;
            }
            const selection = editor.getSelection();
            if (selection) {
                const markdownRegex = /!\[.*\]\((.*)\)/g;
                const markdownMatch = markdownRegex.exec(selection);
                if (markdownMatch && markdownMatch.length > 1) {
                    const markdownUrl = markdownMatch[1];
                    // Only offer deletion for URLs we know we uploaded.
                    if (this.settings.uploadedImages.find((item) => item.imgUrl === markdownUrl)) {
                        this.addMenu(menu, markdownUrl, editor);
                    }
                }
            }
        }));
    }
    /** Append a PicList remote-delete item to an editor context menu. */
    addMenu = (menu, imgPath, editor) => {
        menu.addItem((item) => item
            .setIcon("trash-2")
            .setTitle(t("Delete image using PicList"))
            .onClick(async () => {
            try {
                const selectedItem = this.settings.uploadedImages.find((item) => item.imgUrl === imgPath);
                if (selectedItem) {
                    const res = await this.picGoDeleter.deleteImage([selectedItem]);
                    if (res.success) {
                        new obsidian.Notice(t("Delete successfully"));
                        // Remove the selected markdown link from the note as well.
                        const selection = editor.getSelection();
                        if (selection) {
                            editor.replaceSelection("");
                        }
                        this.settings.uploadedImages =
                            this.settings.uploadedImages.filter((item) => item.imgUrl !== imgPath);
                        // FIX: await the persist instead of leaving a floating promise.
                        await this.saveSettings();
                    }
                    else {
                        new obsidian.Notice(t("Delete failed"));
                    }
                }
            }
            catch {
                new obsidian.Notice(t("Error, could not delete"));
            }
        }));
    };
    /** Add an "Upload" entry to the file-explorer context menu for image files. */
    registerFileMenu() {
        this.registerEvent(this.app.workspace.on("file-menu", (menu, file, source, leaf) => {
            if (source === "canvas-menu")
                return false;
            if (!isAssetTypeAnImage(file.path))
                return false;
            menu.addItem((item) => {
                item
                    .setTitle("Upload")
                    .setIcon("upload")
                    .onClick(() => {
                    if (!(file instanceof obsidian.TFile)) {
                        return false;
                    }
                    this.fileMenuUpload(file);
                });
            });
        }));
    }
    /**
     * Upload one vault image (chosen from the file menu) and replace every
     * reference to it in the active note with the uploaded URL.
     */
    fileMenuUpload(file) {
        let content = this.helper.getValue();
        const basePath = this.app.vault.adapter.getBasePath();
        let imageList = [];
        const fileArray = this.helper.getAllFiles();
        for (const match of fileArray) {
            const imageName = match.name;
            const encodedUri = match.path;
            const fileName = pathBrowserify.basename(decodeURI(encodedUri));
            if (file && file.name === fileName) {
                const abstractImageFile = pathBrowserify.join(basePath, file.path);
                if (isAssetTypeAnImage(abstractImageFile)) {
                    imageList.push({
                        path: abstractImageFile,
                        name: imageName,
                        source: match.source,
                    });
                }
            }
        }
        if (imageList.length === 0) {
            new obsidian.Notice(t("Can not find image file"));
            return;
        }
        this.uploader.uploadFiles(imageList.map(item => item.path)).then(res => {
            if (res.success) {
                let uploadUrlList = res.result;
                // Results arrive in request order; consume them one per image.
                imageList.forEach(item => {
                    const uploadImage = uploadUrlList.shift();
                    const name = this.handleName(item.name);
                    content = content.replaceAll(item.source, `![${name}](${uploadImage})`);
                });
                this.helper.setValue(content);
                if (this.settings.deleteSource) {
                    imageList.forEach(image => {
                        if (!image.path.startsWith("http")) {
                            // Best-effort delete of the local copy; errors ignored.
                            require$$0.unlink(image.path, () => { });
                        }
                    });
                }
            }
            else {
                new obsidian.Notice("Upload error");
            }
        });
    }
    /**
     * Filter the note's image references down to those eligible for upload:
     * local files always qualify; http(s) URLs only when "work on network" is
     * on and the domain is not black-listed.
     */
    filterFile(fileArray) {
        const imageList = [];
        for (const match of fileArray) {
            if (match.path.startsWith("http")) {
                if (this.settings.workOnNetWork) {
                    if (!this.helper.hasBlackDomain(match.path, this.settings.newWorkBlackDomains)) {
                        imageList.push({
                            path: match.path,
                            name: match.name,
                            source: match.source,
                        });
                    }
                }
            }
            else {
                imageList.push({
                    path: match.path,
                    name: match.name,
                    source: match.source,
                });
            }
        }
        return imageList;
    }
    /** Look up a vault file by bare name, building the name→file map on demand. */
    getFile(fileName, fileMap) {
        if (!fileMap) {
            fileMap = arrayToObject(this.app.vault.getFiles(), "name");
        }
        return fileMap[fileName];
    }
    /**
     * Upload every image referenced by the active note and rewrite its links.
     * Local references are resolved in order: absolute vault path, then
     * relative ("./", "../") to the note, then shortest-match by file name.
     */
    uploadAllFile() {
        let content = this.helper.getValue();
        const basePath = this.app.vault.adapter.getBasePath();
        const activeFile = this.app.workspace.getActiveFile();
        const fileMap = arrayToObject(this.app.vault.getFiles(), "name");
        const filePathMap = arrayToObject(this.app.vault.getFiles(), "path");
        let imageList = [];
        const fileArray = this.filterFile(this.helper.getAllFiles());
        for (const match of fileArray) {
            const imageName = match.name;
            const encodedUri = match.path;
            if (encodedUri.startsWith("http")) {
                imageList.push({
                    path: match.path,
                    name: imageName,
                    source: match.source,
                });
            }
            else {
                const fileName = pathBrowserify.basename(decodeURI(encodedUri));
                let file;
                // Absolute vault path.
                if (filePathMap[decodeURI(encodedUri)]) {
                    file = filePathMap[decodeURI(encodedUri)];
                }
                // Path relative to the active note.
                // FIX: the original grouped this as (!file && "./") || "../",
                // re-resolving "../" paths even when `file` was already found.
                if (!file &&
                    (decodeURI(encodedUri).startsWith("./") ||
                        decodeURI(encodedUri).startsWith("../"))) {
                    const filePath = pathBrowserify.resolve(pathBrowserify.join(basePath, pathBrowserify.dirname(activeFile.path)), decodeURI(encodedUri));
                    if (require$$0.existsSync(filePath)) {
                        const path = obsidian.normalizePath(pathBrowserify.relative(obsidian.normalizePath(basePath), obsidian.normalizePath(pathBrowserify.resolve(pathBrowserify.join(basePath, pathBrowserify.dirname(activeFile.path)), decodeURI(encodedUri)))));
                        file = filePathMap[path];
                    }
                }
                // Fall back to shortest-path lookup by bare file name.
                if (!file) {
                    file = this.getFile(fileName, fileMap);
                }
                if (file) {
                    const abstractImageFile = pathBrowserify.join(basePath, file.path);
                    if (isAssetTypeAnImage(abstractImageFile)) {
                        imageList.push({
                            path: abstractImageFile,
                            name: imageName,
                            source: match.source,
                        });
                    }
                }
            }
        }
        if (imageList.length === 0) {
            new obsidian.Notice(t("Can not find image file"));
            return;
        }
        else {
            new obsidian.Notice(`共找到${imageList.length}个图像文件,开始上传`);
        }
        this.uploader.uploadFiles(imageList.map(item => item.path)).then(res => {
            if (res.success) {
                let uploadUrlList = res.result;
                if (imageList.length !== uploadUrlList.length) {
                    new obsidian.Notice(t("Warning: upload files is different of reciver files from api"));
                }
                imageList.forEach(item => {
                    const uploadImage = uploadUrlList.shift();
                    const name = this.handleName(item.name);
                    content = content.replaceAll(item.source, `![${name}](${uploadImage})`);
                });
                // Abort if the user switched notes while the upload was in flight.
                const currentFile = this.app.workspace.getActiveFile();
                if (activeFile.path !== currentFile.path) {
                    new obsidian.Notice(t("File has been changedd, upload failure"));
                    return;
                }
                this.helper.setValue(content);
                if (this.settings.deleteSource) {
                    imageList.forEach(image => {
                        if (!image.path.startsWith("http")) {
                            require$$0.unlink(image.path, () => { });
                        }
                    });
                }
            }
            else {
                new obsidian.Notice("Upload error");
            }
        });
    }
    /** Wire up paste and drag-and-drop upload for the markdown editor. */
    setupPasteHandler() {
        this.registerEvent(this.app.workspace.on("editor-paste", (evt, editor, markdownView) => {
            // Frontmatter key "image-auto-upload" can override the global switch.
            const allowUpload = this.helper.getFrontmatterValue("image-auto-upload", this.settings.uploadByClipSwitch);
            if (!allowUpload) {
                return;
            }
            // Case 1: the clipboard contains markdown links to remote images.
            if (this.settings.workOnNetWork) {
                const clipboardValue = evt.clipboardData.getData("text/plain");
                const imageList = this.helper
                    .getImageLink(clipboardValue)
                    .filter(image => image.path.startsWith("http"))
                    .filter(image => !this.helper.hasBlackDomain(image.path, this.settings.newWorkBlackDomains));
                if (imageList.length !== 0) {
                    this.uploader
                        .uploadFiles(imageList.map(item => item.path))
                        .then(res => {
                        let value = this.helper.getValue();
                        if (res.success) {
                            let uploadUrlList = res.result;
                            imageList.forEach(item => {
                                const uploadImage = uploadUrlList.shift();
                                const name = this.handleName(item.name);
                                value = value.replaceAll(item.source, `![${name}](${uploadImage})`);
                            });
                            this.helper.setValue(value);
                        }
                        else {
                            new obsidian.Notice("Upload error");
                        }
                    });
                }
            }
            // Case 2: the clipboard contains an actual image file.
            if (this.canUpload(evt.clipboardData)) {
                this.uploadFileAndEmbedImgurImage(editor, async (editor, pasteId) => {
                    const res = await this.uploader.uploadFileByClipboard(evt.clipboardData.files);
                    if (res.code !== 0) {
                        this.handleFailedUpload(editor, pasteId, res.msg);
                        return;
                    }
                    return res.data;
                    // FIX: .catch() with no handler is a no-op; swallow here because
                    // failures are already reported via handleFailedUpload.
                }, evt.clipboardData).catch(() => { });
                evt.preventDefault();
            }
        }));
        this.registerEvent(this.app.workspace.on("editor-drop", async (evt, editor, markdownView) => {
            // When ctrl is held the drop is meant to insert a local file link.
            if (evt.ctrlKey) {
                return;
            }
            const allowUpload = this.helper.getFrontmatterValue("image-auto-upload", this.settings.uploadByClipSwitch);
            const files = evt.dataTransfer.files;
            if (!allowUpload) {
                return;
            }
            if (files.length !== 0 && files[0].type.startsWith("image")) {
                const sendFiles = [];
                Array.from(files).forEach((item, index) => {
                    if (item.path) {
                        sendFiles.push(item.path);
                    }
                    else {
                        // Newer Electron removed File.path; resolve it via webUtils.
                        const { webUtils } = require("electron");
                        const path = webUtils.getPathForFile(item);
                        sendFiles.push(path);
                    }
                });
                evt.preventDefault();
                const data = await this.uploader.uploadFiles(sendFiles);
                if (data.success) {
                    data.result.map((value) => {
                        let pasteId = (Math.random() + 1).toString(36).slice(2, 7);
                        this.insertTemporaryText(editor, pasteId);
                        this.embedMarkDownImage(editor, pasteId, value, files[0].name);
                    });
                }
                else {
                    new obsidian.Notice("Upload error");
                }
            }
        }));
    }
    /**
     * Decide whether a clipboard paste should be uploaded: it must carry an
     * image file, and when text accompanies the image (e.g. copies from Excel)
     * the "applyImage" setting controls whether to upload anyway.
     */
    canUpload(clipboardData) {
        const files = clipboardData.files;
        const text = clipboardData.getData("text");
        const hasImageFile = files.length !== 0 && files[0].type.startsWith("image");
        if (hasImageFile) {
            if (!!text) {
                return this.settings.applyImage;
            }
            else {
                return true;
            }
        }
        else {
            return false;
        }
    }
    /**
     * Insert a placeholder, run the upload callback, then swap the placeholder
     * for the final markdown image link (or an error marker on failure).
     */
    async uploadFileAndEmbedImgurImage(editor, callback, clipboardData) {
        let pasteId = (Math.random() + 1).toString(36).slice(2, 7);
        this.insertTemporaryText(editor, pasteId);
        const name = clipboardData.files[0].name;
        try {
            const url = await callback(editor, pasteId);
            this.embedMarkDownImage(editor, pasteId, url, name);
        }
        catch (e) {
            this.handleFailedUpload(editor, pasteId, e);
        }
    }
    /** Drop an "Uploading file...<id>" placeholder at the cursor. */
    insertTemporaryText(editor, pasteId) {
        let progressText = imageAutoUploadPlugin.progressTextFor(pasteId);
        editor.replaceSelection(progressText + "\n");
    }
    /** Placeholder text used while an upload is in flight. */
    static progressTextFor(id) {
        return `![Uploading file...${id}]()`;
    }
    /** Replace the upload placeholder with the final markdown image link. */
    embedMarkDownImage(editor, pasteId, imageUrl, name = "") {
        let progressText = imageAutoUploadPlugin.progressTextFor(pasteId);
        name = this.handleName(name);
        let markDownImage = `![${name}](${imageUrl})`;
        imageAutoUploadPlugin.replaceFirstOccurrence(editor, progressText, markDownImage);
    }
    /** Surface the failure and replace the placeholder with a warning marker. */
    handleFailedUpload(editor, pasteId, reason) {
        new obsidian.Notice(reason);
        console.error("Failed request: ", reason);
        let progressText = imageAutoUploadPlugin.progressTextFor(pasteId);
        imageAutoUploadPlugin.replaceFirstOccurrence(editor, progressText, "⚠️ upload failed, check dev console");
    }
    /**
     * Apply the image-description setting to a link's alt text:
     * "origin" keeps it, "none" strips it, "removeDefault" strips only the
     * default "image.png" name. The optional size suffix is appended.
     */
    handleName(name) {
        const imageSizeSuffix = this.settings.imageSizeSuffix || "";
        if (this.settings.imageDesc === "origin") {
            return `${name}${imageSizeSuffix}`;
        }
        else if (this.settings.imageDesc === "none") {
            return "";
        }
        else if (this.settings.imageDesc === "removeDefault") {
            if (name === "image.png") {
                return "";
            }
            else {
                return `${name}${imageSizeSuffix}`;
            }
        }
        else {
            return `${name}${imageSizeSuffix}`;
        }
    }
    /** Replace the first occurrence of `target` in the editor with `replacement`. */
    static replaceFirstOccurrence(editor, target, replacement) {
        let lines = editor.getValue().split("\n");
        for (let i = 0; i < lines.length; i++) {
            let ch = lines[i].indexOf(target);
            if (ch !== -1) {
                let from = { line: i, ch: ch };
                let to = { line: i, ch: ch + target.length };
                editor.replaceRange(replacement, from, to);
                break;
            }
        }
    }
}
// 2025-08-02 12:09:34 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
module . exports = imageAutoUploadPlugin ;
// 2026-05-06 17:32:44 +08:00 (stray timestamp artifact, commented out to keep the file parseable)
/* nosourcemap */