Diffstat (limited to 'node_modules/asar/lib')
-rw-r--r--  node_modules/asar/lib/asar.js        219
-rw-r--r--  node_modules/asar/lib/crawlfs.js      41
-rw-r--r--  node_modules/asar/lib/disk.js        123
-rw-r--r--  node_modules/asar/lib/filesystem.js  151
-rw-r--r--  node_modules/asar/lib/index.d.ts      90
-rw-r--r--  node_modules/asar/lib/integrity.js    62
-rw-r--r--  node_modules/asar/lib/wrapped-fs.js   26
7 files changed, 0 insertions, 712 deletions
diff --git a/node_modules/asar/lib/asar.js b/node_modules/asar/lib/asar.js
deleted file mode 100644
index 050e1a7..0000000
--- a/node_modules/asar/lib/asar.js
+++ /dev/null
@@ -1,219 +0,0 @@
-'use strict'
-
-const fs = require('./wrapped-fs')
-const path = require('path')
-const minimatch = require('minimatch')
-
-const Filesystem = require('./filesystem')
-const disk = require('./disk')
-const crawlFilesystem = require('./crawlfs')
-
-/**
- * Whether a directory should be excluded from packing due to the `--unpack-dir` option.
- *
- * @param {string} dirPath - directory path to check
- * @param {string} pattern - literal prefix (for backward compatibility) or glob pattern
- * @param {array} unpackDirs - array of directory paths previously marked as unpacked
- */
-function isUnpackedDir (dirPath, pattern, unpackDirs) {
- if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) {
- if (!unpackDirs.includes(dirPath)) {
- unpackDirs.push(dirPath)
- }
- return true
- } else {
- return unpackDirs.some(unpackDir => dirPath.startsWith(unpackDir))
- }
-}
-
-module.exports.createPackage = async function (src, dest) {
- return module.exports.createPackageWithOptions(src, dest, {})
-}
-
-module.exports.createPackageWithOptions = async function (src, dest, options) {
- const globOptions = options.globOptions ? options.globOptions : {}
- globOptions.dot = options.dot === undefined ? true : options.dot
-
- const pattern = src + (options.pattern ? options.pattern : '/**/*')
-
- const [filenames, metadata] = await crawlFilesystem(pattern, globOptions)
- return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options)
-}
-
-/**
- * Create an ASAR archive from a list of filenames.
- *
- * @param {string} src - Base path. All files are relative to this.
- * @param {string} dest - Archive filename (& path).
- * @param {array} filenames - List of filenames relative to src.
- * @param {object} metadata - Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
- * @param {object} options - Options passed to `createPackageWithOptions`.
- */
-module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) {
- if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
- if (typeof options === 'undefined' || options === null) { options = {} }
-
- src = path.normalize(src)
- dest = path.normalize(dest)
- filenames = filenames.map(function (filename) { return path.normalize(filename) })
-
- const filesystem = new Filesystem(src)
- const files = []
- const unpackDirs = []
-
- let filenamesSorted = []
- if (options.ordering) {
- const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => {
- if (line.includes(':')) { line = line.split(':').pop() }
- line = line.trim()
- if (line.startsWith('/')) { line = line.slice(1) }
- return line
- })
-
- const ordering = []
- for (const file of orderingFiles) {
- const pathComponents = file.split(path.sep)
- let str = src
- for (const pathComponent of pathComponents) {
- str = path.join(str, pathComponent)
- ordering.push(str)
- }
- }
-
- let missing = 0
- const total = filenames.length
-
- for (const file of ordering) {
- if (!filenamesSorted.includes(file) && filenames.includes(file)) {
- filenamesSorted.push(file)
- }
- }
-
- for (const file of filenames) {
- if (!filenamesSorted.includes(file)) {
- filenamesSorted.push(file)
- missing += 1
- }
- }
-
- console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`)
- } else {
- filenamesSorted = filenames
- }
-
- const handleFile = async function (filename) {
- if (!metadata[filename]) {
- metadata[filename] = await crawlFilesystem.determineFileType(filename)
- }
- const file = metadata[filename]
-
- let shouldUnpack
- switch (file.type) {
- case 'directory':
- if (options.unpackDir) {
- shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs)
- } else {
- shouldUnpack = false
- }
- filesystem.insertDirectory(filename, shouldUnpack)
- break
- case 'file':
- shouldUnpack = false
- if (options.unpack) {
- shouldUnpack = minimatch(filename, options.unpack, { matchBase: true })
- }
- if (!shouldUnpack && options.unpackDir) {
- const dirName = path.relative(src, path.dirname(filename))
- shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs)
- }
- files.push({ filename: filename, unpack: shouldUnpack })
- return filesystem.insertFile(filename, shouldUnpack, file, options)
- case 'link':
- filesystem.insertLink(filename)
- break
- }
- return Promise.resolve()
- }
-
- const insertsDone = async function () {
- await fs.mkdirp(path.dirname(dest))
- return disk.writeFilesystem(dest, filesystem, files, metadata)
- }
-
- const names = filenamesSorted.slice()
-
- const next = async function (name) {
- if (!name) { return insertsDone() }
-
- await handleFile(name)
- return next(names.shift())
- }
-
- return next(names.shift())
-}
-
-module.exports.statFile = function (archive, filename, followLinks) {
- const filesystem = disk.readFilesystemSync(archive)
- return filesystem.getFile(filename, followLinks)
-}
-
-module.exports.getRawHeader = function (archive) {
- return disk.readArchiveHeaderSync(archive)
-}
-
-module.exports.listPackage = function (archive, options) {
- return disk.readFilesystemSync(archive).listFiles(options)
-}
-
-module.exports.extractFile = function (archive, filename) {
- const filesystem = disk.readFilesystemSync(archive)
- return disk.readFileSync(filesystem, filename, filesystem.getFile(filename))
-}
-
-module.exports.extractAll = function (archive, dest) {
- const filesystem = disk.readFilesystemSync(archive)
- const filenames = filesystem.listFiles()
-
-  // Under Windows, just extract links as regular files
- const followLinks = process.platform === 'win32'
-
- // create destination directory
- fs.mkdirpSync(dest)
-
- for (const fullPath of filenames) {
- // Remove leading slash
- const filename = fullPath.substr(1)
- const destFilename = path.join(dest, filename)
- const file = filesystem.getFile(filename, followLinks)
- if (file.files) {
- // it's a directory, create it and continue with the next entry
- fs.mkdirpSync(destFilename)
- } else if (file.link) {
-      // it's a symlink, recreate it relative to the destination
- const linkSrcPath = path.dirname(path.join(dest, file.link))
- const linkDestPath = path.dirname(destFilename)
- const relativePath = path.relative(linkDestPath, linkSrcPath)
- // try to delete output file, because we can't overwrite a link
- try {
- fs.unlinkSync(destFilename)
- } catch {}
- const linkTo = path.join(relativePath, path.basename(file.link))
- fs.symlinkSync(linkTo, destFilename)
- } else {
- // it's a file, extract it
- const content = disk.readFileSync(filesystem, filename, file)
- fs.writeFileSync(destFilename, content)
- if (file.executable) {
- fs.chmodSync(destFilename, '755')
- }
- }
- }
-}
-
-module.exports.uncache = function (archive) {
- return disk.uncacheFilesystem(archive)
-}
-
-module.exports.uncacheAll = function () {
- disk.uncacheAll()
-}
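
Taken together, the module deleted above is the whole public surface of `asar`. For reference, a minimal usage sketch of those functions; the `app` directory layout, globs, and output paths are hypothetical:

    const asar = require('asar')

    async function main () {
      // Pack a directory; keep native modules and an assets dir outside the archive.
      await asar.createPackageWithOptions('app', 'app.asar', {
        unpack: '*.node',     // matched files land in app.asar.unpacked/
        unpackDir: 'assets'   // ...as does everything under app/assets
      })

      // Inspect and extract.
      console.log(asar.listPackage('app.asar', { isPack: true }))
      const buf = asar.extractFile('app.asar', 'index.js')  // returns a Buffer
      asar.extractAll('app.asar', 'app-extracted')
    }

    main()
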
diff --git a/node_modules/asar/lib/crawlfs.js b/node_modules/asar/lib/crawlfs.js
deleted file mode 100644
index a26c3eb..0000000
--- a/node_modules/asar/lib/crawlfs.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const { promisify } = require('util')
-
-const fs = require('./wrapped-fs')
-const glob = promisify(require('glob'))
-
-async function determineFileType (filename) {
- const stat = await fs.lstat(filename)
- if (stat.isFile()) {
- return { type: 'file', stat }
- } else if (stat.isDirectory()) {
- return { type: 'directory', stat }
- } else if (stat.isSymbolicLink()) {
- return { type: 'link', stat }
- }
-}
-
-module.exports = async function (dir, options) {
- const metadata = {}
- const crawled = await glob(dir, options)
- const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)]))
- const links = []
- const filenames = results.map(([filename, type]) => {
- if (type) {
- metadata[filename] = type
- if (type.type === 'link') links.push(filename)
- }
- return filename
- }).filter((filename) => {
-    // Newer glob can return files inside symlinked directories; to avoid
-    // those appearing in archives we need to manually exclude them here.
- const exactLinkIndex = links.findIndex(link => filename === link)
- return links.every((link, index) => {
- if (index === exactLinkIndex) return true
- return !filename.startsWith(link)
- })
- })
- return [filenames, metadata]
-}
-module.exports.determineFileType = determineFileType
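
The crawler above resolves a glob to a `[filenames, metadata]` pair and filters out entries that live inside symlinked directories. A sketch of calling it directly; the `app/**/*` pattern is illustrative, and in-package consumers require it as `./crawlfs`:

    const crawlFilesystem = require('asar/lib/crawlfs')

    async function main () {
      const [filenames, metadata] = await crawlFilesystem('app/**/*', { dot: true })
      for (const name of filenames) {
        // determineFileType returns undefined for anything that is not a
        // file, directory, or symlink, so guard the lookup.
        const entry = metadata[name]
        console.log(entry ? entry.type : 'other', name)
      }
    }

    main()
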
diff --git a/node_modules/asar/lib/disk.js b/node_modules/asar/lib/disk.js
deleted file mode 100644
index 34569a4..0000000
--- a/node_modules/asar/lib/disk.js
+++ /dev/null
@@ -1,123 +0,0 @@
-'use strict'
-
-const fs = require('./wrapped-fs')
-const path = require('path')
-const pickle = require('chromium-pickle-js')
-
-const Filesystem = require('./filesystem')
-let filesystemCache = {}
-
-async function copyFile (dest, src, filename) {
- const srcFile = path.join(src, filename)
- const targetFile = path.join(dest, filename)
-
- const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))])
- return fs.writeFile(targetFile, content, { mode: stats.mode })
-}
-
-async function streamTransformedFile (originalFilename, outStream, transformed) {
- return new Promise((resolve, reject) => {
- const stream = fs.createReadStream(transformed ? transformed.path : originalFilename)
- stream.pipe(outStream, { end: false })
- stream.on('error', reject)
- stream.on('end', () => resolve())
- })
-}
-
-const writeFileListToStream = async function (dest, filesystem, out, list, metadata) {
- for (const file of list) {
-    if (file.unpack) { // the file should not be packed into the archive
- const filename = path.relative(filesystem.src, file.filename)
- await copyFile(`${dest}.unpacked`, filesystem.src, filename)
- } else {
- await streamTransformedFile(file.filename, out, metadata[file.filename].transformed)
- }
- }
- return out.end()
-}
-
-module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) {
- const headerPickle = pickle.createEmpty()
- headerPickle.writeString(JSON.stringify(filesystem.header))
- const headerBuf = headerPickle.toBuffer()
-
- const sizePickle = pickle.createEmpty()
- sizePickle.writeUInt32(headerBuf.length)
- const sizeBuf = sizePickle.toBuffer()
-
- const out = fs.createWriteStream(dest)
- await new Promise((resolve, reject) => {
- out.on('error', reject)
- out.write(sizeBuf)
- return out.write(headerBuf, () => resolve())
- })
- return writeFileListToStream(dest, filesystem, out, files, metadata)
-}
-
-module.exports.readArchiveHeaderSync = function (archive) {
- const fd = fs.openSync(archive, 'r')
- let size
- let headerBuf
- try {
- const sizeBuf = Buffer.alloc(8)
- if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
- throw new Error('Unable to read header size')
- }
-
- const sizePickle = pickle.createFromBuffer(sizeBuf)
- size = sizePickle.createIterator().readUInt32()
- headerBuf = Buffer.alloc(size)
- if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
- throw new Error('Unable to read header')
- }
- } finally {
- fs.closeSync(fd)
- }
-
- const headerPickle = pickle.createFromBuffer(headerBuf)
- const header = headerPickle.createIterator().readString()
- return { headerString: header, header: JSON.parse(header), headerSize: size }
-}
-
-module.exports.readFilesystemSync = function (archive) {
- if (!filesystemCache[archive]) {
- const header = this.readArchiveHeaderSync(archive)
- const filesystem = new Filesystem(archive)
- filesystem.header = header.header
- filesystem.headerSize = header.headerSize
- filesystemCache[archive] = filesystem
- }
- return filesystemCache[archive]
-}
-
-module.exports.uncacheFilesystem = function (archive) {
- if (filesystemCache[archive]) {
- filesystemCache[archive] = undefined
- return true
- }
- return false
-}
-
-module.exports.uncacheAll = function () {
- filesystemCache = {}
-}
-
-module.exports.readFileSync = function (filesystem, filename, info) {
-  let buffer = Buffer.alloc(info.size)
-  // Node throws an exception when reading 0 bytes into a 0-size buffer,
-  // so we short-circuit the read in this case.
-  if (info.size <= 0) { return buffer }
-  if (info.unpacked) {
-    // it's an unpacked file, copy it.
-    buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
-  } else {
-    const fd = fs.openSync(filesystem.src, 'r')
- try {
- const offset = 8 + filesystem.headerSize + parseInt(info.offset)
- fs.readSync(fd, buffer, 0, info.size, offset)
- } finally {
- fs.closeSync(fd)
- }
- }
- return buffer
-}
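
The reader above implies the on-disk layout: an 8-byte pickle carrying the header size, the pickled JSON header, then the concatenated file contents, each addressed by an `offset` relative to the end of the header. A hand-rolled reader sketch under those assumptions; `readEntry` and its slash-separated `entry` argument are hypothetical helpers:

    const fs = require('fs')
    const pickle = require('chromium-pickle-js')

    function readEntry (archive, entry) {
      const fd = fs.openSync(archive, 'r')
      try {
        // First 8 bytes: a pickle holding the header's byte length.
        const sizeBuf = Buffer.alloc(8)
        fs.readSync(fd, sizeBuf, 0, 8, 0)
        const headerSize = pickle.createFromBuffer(sizeBuf).createIterator().readUInt32()

        // Next headerSize bytes: a pickle holding the JSON header string.
        const headerBuf = Buffer.alloc(headerSize)
        fs.readSync(fd, headerBuf, 0, headerSize, 8)
        const header = JSON.parse(pickle.createFromBuffer(headerBuf).createIterator().readString())

        // Walk header.files down to the entry; its bytes start at
        // 8 + headerSize + offset, mirroring readFileSync above.
        let info = header
        for (const part of entry.split('/')) info = info.files[part]
        const buf = Buffer.alloc(info.size)
        fs.readSync(fd, buf, 0, info.size, 8 + headerSize + Number(info.offset))
        return buf
      } finally {
        fs.closeSync(fd)
      }
    }
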
diff --git a/node_modules/asar/lib/filesystem.js b/node_modules/asar/lib/filesystem.js
deleted file mode 100644
index 552055e..0000000
--- a/node_modules/asar/lib/filesystem.js
+++ /dev/null
@@ -1,151 +0,0 @@
-'use strict'
-
-const fs = require('./wrapped-fs')
-const os = require('os')
-const path = require('path')
-const { promisify } = require('util')
-const stream = require('stream')
-const getFileIntegrity = require('./integrity')
-
-const UINT32_MAX = 2 ** 32 - 1
-
-const pipeline = promisify(stream.pipeline)
-
-class Filesystem {
- constructor (src) {
- this.src = path.resolve(src)
- this.header = { files: {} }
- this.offset = BigInt(0)
- }
-
- searchNodeFromDirectory (p) {
- let json = this.header
- const dirs = p.split(path.sep)
- for (const dir of dirs) {
- if (dir !== '.') {
- json = json.files[dir]
- }
- }
- return json
- }
-
- searchNodeFromPath (p) {
- p = path.relative(this.src, p)
- if (!p) { return this.header }
- const name = path.basename(p)
- const node = this.searchNodeFromDirectory(path.dirname(p))
- if (node.files == null) {
- node.files = {}
- }
- if (node.files[name] == null) {
- node.files[name] = {}
- }
- return node.files[name]
- }
-
- insertDirectory (p, shouldUnpack) {
- const node = this.searchNodeFromPath(p)
- if (shouldUnpack) {
- node.unpacked = shouldUnpack
- }
- node.files = {}
- return node.files
- }
-
- async insertFile (p, shouldUnpack, file, options) {
- const dirNode = this.searchNodeFromPath(path.dirname(p))
- const node = this.searchNodeFromPath(p)
- if (shouldUnpack || dirNode.unpacked) {
- node.size = file.stat.size
- node.unpacked = true
- node.integrity = await getFileIntegrity(p)
- return Promise.resolve()
- }
-
- let size
-
- const transformed = options.transform && options.transform(p)
- if (transformed) {
- const tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'asar-'))
- const tmpfile = path.join(tmpdir, path.basename(p))
- const out = fs.createWriteStream(tmpfile)
- const readStream = fs.createReadStream(p)
-
- await pipeline(readStream, transformed, out)
- file.transformed = {
- path: tmpfile,
- stat: await fs.lstat(tmpfile)
- }
- size = file.transformed.stat.size
- } else {
- size = file.stat.size
- }
-
-  // The format caps entry sizes at UINT32_MAX; larger files cannot be
-  // represented reliably, so reject them here.
-  if (size > UINT32_MAX) {
-    throw new Error(`${p}: file size cannot be larger than 4.2GB`)
- }
-
- node.size = size
- node.offset = this.offset.toString()
- node.integrity = await getFileIntegrity(p)
- if (process.platform !== 'win32' && (file.stat.mode & 0o100)) {
- node.executable = true
- }
- this.offset += BigInt(size)
- }
-
- insertLink (p) {
- const link = path.relative(fs.realpathSync(this.src), fs.realpathSync(p))
- if (link.substr(0, 2) === '..') {
- throw new Error(`${p}: file "${link}" links out of the package`)
- }
- const node = this.searchNodeFromPath(p)
- node.link = link
- return link
- }
-
- listFiles (options) {
- const files = []
-
- const fillFilesFromMetadata = function (basePath, metadata) {
- if (!metadata.files) {
- return
- }
-
- for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
- const fullPath = path.join(basePath, childPath)
- const packState = childMetadata.unpacked ? 'unpack' : 'pack '
- files.push((options && options.isPack) ? `${packState} : ${fullPath}` : fullPath)
- fillFilesFromMetadata(fullPath, childMetadata)
- }
- }
-
- fillFilesFromMetadata('/', this.header)
- return files
- }
-
- getNode (p) {
- const node = this.searchNodeFromDirectory(path.dirname(p))
- const name = path.basename(p)
- if (name) {
- return node.files[name]
- } else {
- return node
- }
- }
-
- getFile (p, followLinks) {
- followLinks = typeof followLinks === 'undefined' ? true : followLinks
- const info = this.getNode(p)
-
- // if followLinks is false we don't resolve symlinks
- if (info.link && followLinks) {
- return this.getFile(info.link)
- } else {
- return info
- }
- }
-}
-
-module.exports = Filesystem
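
For orientation, the class above serializes to a header shaped like the following, sketched here as a JS object literal with invented names, sizes, and offsets and an abridged `integrity` record. Note that `offset` is a string (it is built from a `BigInt`) and unpacked entries carry no offset:

    {
      "files": {
        "index.js":    { "size": 1024, "offset": "0",    "integrity": { /* hash, algorithm, blocks, blockSize */ } },
        "lib": {
          "files": {
            "util.js": { "size": 2048, "offset": "1024", "executable": true, "integrity": { /* ... */ } }
          }
        },
        "current":     { "link": "lib/util.js" },
        "native.node": { "size": 4096, "unpacked": true, "integrity": { /* ... */ } }
      }
    }
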
diff --git a/node_modules/asar/lib/index.d.ts b/node_modules/asar/lib/index.d.ts
deleted file mode 100644
index b3790ec..0000000
--- a/node_modules/asar/lib/index.d.ts
+++ /dev/null
@@ -1,90 +0,0 @@
-import { IOptions as GlobOptions } from 'glob';
-import { Stats } from 'fs';
-
-export type CreateOptions = {
- dot?: boolean;
- globOptions?: GlobOptions;
- ordering?: string;
- pattern?: string;
- transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
- unpack?: string;
- unpackDir?: string;
-};
-
-export type ListOptions = {
- isPack: boolean;
-};
-
-export type EntryMetadata = {
- unpacked: boolean;
-};
-
-export type DirectoryMetadata = EntryMetadata & {
- files: { [property: string]: EntryMetadata };
-};
-
-export type FileMetadata = EntryMetadata & {
- executable?: true;
- offset?: number;
- size?: number;
-};
-
-export type LinkMetadata = {
- link: string;
-};
-
-export type Metadata = DirectoryMetadata | FileMetadata | LinkMetadata;
-
-export type InputMetadataType = 'directory' | 'file' | 'link';
-
-export type InputMetadata = {
- [property: string]: {
- type: InputMetadataType;
- stat: Stats;
- }
-};
-
-export type DirectoryRecord = {
- files: Record<string, DirectoryRecord | FileRecord>;
-};
-
-export type FileRecord = {
- offset: string;
- size: number;
- executable?: boolean;
- integrity: {
- hash: string;
- algorithm: 'SHA256';
- blocks: string[];
- blockSize: number;
- };
-}
-
-export type ArchiveHeader = {
-  // The parsed JSON header
- header: DirectoryRecord;
- headerString: string;
- headerSize: number;
-}
-
-export function createPackage(src: string, dest: string): Promise<void>;
-export function createPackageWithOptions(
- src: string,
- dest: string,
- options: CreateOptions
-): Promise<void>;
-export function createPackageFromFiles(
- src: string,
- dest: string,
- filenames: string[],
- metadata?: InputMetadata,
- options?: CreateOptions
-): Promise<void>;
-
-export function statFile(archive: string, filename: string, followLinks?: boolean): Metadata;
-export function getRawHeader(archive: string): ArchiveHeader;
-export function listPackage(archive: string, options?: ListOptions): string[];
-export function extractFile(archive: string, filename: string): Buffer;
-export function extractAll(archive: string, dest: string): void;
-export function uncache(archive: string): boolean;
-export function uncacheAll(): void;
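
The `Metadata` union above can be discriminated structurally, since only links have `link` and only directories have `files`. A small consumer sketch; the archive name and entry path are placeholders:

    const asar = require('asar')

    const meta = asar.statFile('app.asar', 'lib/util.js')
    if ('link' in meta) {
      console.log('symlink ->', meta.link)
    } else if ('files' in meta) {
      console.log('directory with', Object.keys(meta.files).length, 'entries')
    } else {
      console.log('file of', meta.size, 'bytes')
    }
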
diff --git a/node_modules/asar/lib/integrity.js b/node_modules/asar/lib/integrity.js
deleted file mode 100644
index 6fabee4..0000000
--- a/node_modules/asar/lib/integrity.js
+++ /dev/null
@@ -1,62 +0,0 @@
-const crypto = require('crypto')
-const fs = require('fs')
-const stream = require('stream')
-const { promisify } = require('util')
-
-const ALGORITHM = 'SHA256'
-// 4MB default block size
-const BLOCK_SIZE = 4 * 1024 * 1024
-
-const pipeline = promisify(stream.pipeline)
-
-function hashBlock (block) {
- return crypto.createHash(ALGORITHM).update(block).digest('hex')
-}
-
-async function getFileIntegrity (path) {
- const fileHash = crypto.createHash(ALGORITHM)
-
- const blocks = []
- let currentBlockSize = 0
- let currentBlock = []
-
- await pipeline(
- fs.createReadStream(path),
- new stream.PassThrough({
- decodeStrings: false,
- transform (_chunk, encoding, callback) {
- fileHash.update(_chunk)
-
- function handleChunk (chunk) {
- const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength)
- currentBlockSize += diffToSlice
- currentBlock.push(chunk.slice(0, diffToSlice))
- if (currentBlockSize === BLOCK_SIZE) {
- blocks.push(hashBlock(Buffer.concat(currentBlock)))
- currentBlock = []
- currentBlockSize = 0
- }
- if (diffToSlice < chunk.byteLength) {
- handleChunk(chunk.slice(diffToSlice))
- }
- }
- handleChunk(_chunk)
- callback()
- },
- flush (callback) {
- blocks.push(hashBlock(Buffer.concat(currentBlock)))
- currentBlock = []
- callback()
- }
- })
- )
-
- return {
- algorithm: ALGORITHM,
- hash: fileHash.digest('hex'),
- blockSize: BLOCK_SIZE,
- blocks: blocks
- }
-}
-
-module.exports = getFileIntegrity
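
To check a file against its header record, the hashes above can be recomputed. A verification sketch that mirrors the writer's blocking, including the empty trailing block it emits for block-aligned files; `verifyIntegrity` is a hypothetical helper and reads the whole file for brevity rather than streaming:

    const crypto = require('crypto')
    const fs = require('fs')

    function hashBlock (block) {
      return crypto.createHash('SHA256').update(block).digest('hex')
    }

    // `integrity` is the { hash, algorithm, blocks, blockSize } record
    // produced by getFileIntegrity above.
    function verifyIntegrity (filePath, integrity) {
      const data = fs.readFileSync(filePath)
      if (hashBlock(data) !== integrity.hash) return false
      for (let i = 0; i < integrity.blocks.length; i++) {
        const block = data.slice(i * integrity.blockSize, (i + 1) * integrity.blockSize)
        if (hashBlock(block) !== integrity.blocks[i]) return false
      }
      return true
    }
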
diff --git a/node_modules/asar/lib/wrapped-fs.js b/node_modules/asar/lib/wrapped-fs.js
deleted file mode 100644
index 24f59d0..0000000
--- a/node_modules/asar/lib/wrapped-fs.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const fs = process.versions.electron ? require('original-fs') : require('fs')
-
-const promisifiedMethods = [
- 'lstat',
- 'mkdtemp',
- 'readFile',
- 'stat',
- 'writeFile'
-]
-
-const promisified = {}
-
-for (const method of Object.keys(fs)) {
- if (promisifiedMethods.includes(method)) {
- promisified[method] = fs.promises[method]
- } else {
- promisified[method] = fs[method]
- }
-}
-// To make it more like fs-extra
-promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true })
-promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true })
-
-module.exports = promisified
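
The result behaves like `fs` with a promisified subset (`lstat`, `mkdtemp`, `readFile`, `stat`, `writeFile`) and two fs-extra-style helpers; in Electron it wraps `original-fs` so reads of `.asar` files are not intercepted by Electron's patched `fs`. A consumer sketch with placeholder paths:

    const fs = require('./wrapped-fs')

    async function main () {
      await fs.mkdirp('out/nested')                // promise-based recursive mkdir
      const stat = await fs.stat('package.json')   // one of the promisified methods
      console.log(stat.size)
      fs.mkdirpSync('out/other')                   // sync variant
      const fd = fs.openSync('package.json', 'r')  // non-listed methods pass through unchanged
      fs.closeSync(fd)
    }

    main()
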