author     LinuxWizard42 <computerwizard@linuxmail.org>  2022-10-12 22:54:37 +0300
committer  LinuxWizard42 <computerwizard@linuxmail.org>  2022-10-12 22:54:37 +0300
commit     703e03aba33f234712206769f57717ba7d92d23d (patch)
tree       0041f04ccb75bd5379c764e9fe42249fffe75fc3 /node_modules/asar
parent     ab6e257e6e9d9a483d7e86f220d8b209a2cd7753 (diff)
Added export_allowed file to make repository visible in cgit
Diffstat (limited to 'node_modules/asar')
-rw-r--r--  node_modules/asar/CHANGELOG.md       140
-rw-r--r--  node_modules/asar/LICENSE.md          20
-rw-r--r--  node_modules/asar/README.md          215
-rwxr-xr-x  node_modules/asar/bin/asar.js         84
-rw-r--r--  node_modules/asar/lib/asar.js        219
-rw-r--r--  node_modules/asar/lib/crawlfs.js      41
-rw-r--r--  node_modules/asar/lib/disk.js        123
-rw-r--r--  node_modules/asar/lib/filesystem.js  151
-rw-r--r--  node_modules/asar/lib/index.d.ts      90
-rw-r--r--  node_modules/asar/lib/integrity.js    62
-rw-r--r--  node_modules/asar/lib/wrapped-fs.js   26
-rw-r--r--  node_modules/asar/package.json        67
12 files changed, 1238 insertions(+), 0 deletions(-)
diff --git a/node_modules/asar/CHANGELOG.md b/node_modules/asar/CHANGELOG.md
new file mode 100644
index 0000000..d112929
--- /dev/null
+++ b/node_modules/asar/CHANGELOG.md
@@ -0,0 +1,140 @@
+# [3.2.0](https://github.com/electron/asar/compare/v3.1.0...v3.2.0) (2022-07-29)
+
+
+### Features
+
+* give better hints when failing due to bad links ([#241](https://github.com/electron/asar/issues/241)) ([db6d154](https://github.com/electron/asar/commit/db6d1541be05f67933f07c6f30d2425a05f13182))
+
+# Changes By Version
+
+## [2.0.1](https://github.com/electron/asar/compare/v2.0.0...v2.0.1) - 2019-04-29
+
+### Fixed
+
+* Don't monkeypatch fs with promisified versions ([#176](https://github.com/electron/asar/issues/176)) ([319dd81](https://github.com/electron/asar/commit/319dd81))
+
+## [2.0.0](https://github.com/electron/asar/compare/v1.0.0...v2.0.0) - 2019-04-27
+
+### Removed
+
+* Support for Node < 8 ([#170](https://github.com/electron/asar/issues/170)) ([8a03eae](https://github.com/electron/asar/commit/8a03eae))
+
+## 1.0.0 - 2019-02-19
+
+### Added
+
+* Promise-based async API (#165)
+
+### Changed
+
+* This module requires Node 6 or later (#165)
+
+### Removed
+
+* V8 snapshot support, which was undocumented and used an unmaintained dependency, which itself had a dependency with a security vulnerability (#165)
+* callback-style async API - if you still need to use this style of API, please check out the `nodeify` module (#165)
+
+## 0.14.6 - 2018-12-10
+
+### Fixed
+
+* Normalize file paths in `asar.createPackageFromFiles` (#159)
+
+## 0.14.5 - 2018-10-01
+
+### Fixed
+
+* Regression from #154
+
+## 0.14.4 - 2018-09-30
+
+### Added
+
+* `--is-pack` CLI option / `isPack` option to `asar.listPackage` (#145)
+
+### Fixed
+
+* Allow `asar.createPackageFromFiles` to not pass `metadata` or `options` (#154)
+
+## 0.14.3 - 2018-03-07
+
+### Added
+
+* `globOptions` option to pass options to the `glob` module
+* `pattern` option
+
+## 0.14.2 - 2018-02-11
+
+### Fixed
+
+* Invoke callback if the last file in the list was marked as unpacked (#142)
+
+## 0.14.1 - 2018-01-30
+
+### Fixed
+
+* Maximum call stack size exceeded during unpack (#140)
+
+## 0.14.0 - 2017-11-02
+
+### Added
+
+* Snapcraft metadata (#130)
+* `uncache` and `uncacheAll` (#118)
+
+### Fixed
+
+* Use of asar inside of an Electron app (#118)
+
+## 0.13.1 - 2017-11-02
+
+### Fixed
+
+- Do not return before the write stream fully closes (#113)
+
+## 0.13.0 - 2017-01-09
+
+### Changed
+
+- Dropped support for Node `0.10.0` and `0.12.0`. The minimum supported version
+ is now Node `4.6.0`. (#100)
+- This project was ported from CoffeeScript to JavaScript. The behavior and
+ APIs should be the same as previous releases. (#100)
+
+## 0.12.4 - 2016-12-28
+
+### Fixed
+
+- Unpack glob patterns containing `{}` characters not working properly (#99)
+
+## 0.12.3 - 2016-08-29
+
+### Fixed
+
+- Multibyte characters in paths are now supported (#86)
+
+## 0.12.2 - 2016-08-22
+
+### Fixed
+
+- Upgraded `minimatch` to `^3.0.3` from `^3.0.0` for [RegExp DOS fix](https://nodesecurity.io/advisories/minimatch_regular-expression-denial-of-service).
+
+## 0.12.1 - 2016-07-25
+
+### Fixed
+
+- Fix `Maximum call stack size exceeded` error regression (#80)
+
+## 0.12.0 - 2016-07-20
+
+### Added
+
+- Added `transform` option to specify a `stream.Transform` function to the
+ `createPackageWithOptions` API (#73)
+
+## 0.11.0 - 2016-04-06
+
+### Fixed
+
+- Upgraded `mksnapshot` dependency to remove logged `graceful-fs` deprecation
+ warnings (#61)
diff --git a/node_modules/asar/LICENSE.md b/node_modules/asar/LICENSE.md
new file mode 100644
index 0000000..4d231b4
--- /dev/null
+++ b/node_modules/asar/LICENSE.md
@@ -0,0 +1,20 @@
+Copyright (c) 2014 GitHub Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/asar/README.md b/node_modules/asar/README.md
new file mode 100644
index 0000000..b9f2568
--- /dev/null
+++ b/node_modules/asar/README.md
@@ -0,0 +1,215 @@
+# asar - Electron Archive
+
+[![CircleCI build status](https://circleci.com/gh/electron/asar/tree/main.svg?style=shield)](https://circleci.com/gh/electron/asar/tree/main)
+[![npm version](http://img.shields.io/npm/v/asar.svg)](https://npmjs.org/package/asar)
+
+Asar is a simple extensible archive format. It works like `tar`, concatenating
+all files together without compression, while still supporting random access.
+
+## Features
+
+* Supports random access
+* Uses JSON to store files' information
+* Very easy to write a parser for
+
+## Command line utility
+
+### Install
+
+This module requires Node 10.12 or later.
+
+```bash
+$ npm install --engine-strict asar
+```
+
+### Usage
+
+```bash
+$ asar --help
+
+ Usage: asar [options] [command]
+
+ Commands:
+
+ pack|p <dir> <output>
+ create asar archive
+
+ list|l <archive>
+ list files of asar archive
+
+ extract-file|ef <archive> <filename>
+ extract one file from archive
+
+ extract|e <archive> <dest>
+ extract archive
+
+
+ Options:
+
+ -h, --help output usage information
+ -V, --version output the version number
+
+```
+
+#### Excluding multiple resources from being packed
+
+Given:
+```
+ app
+(a) ├── x1
+(b) ├── x2
+(c) ├── y3
+(d) │   ├── x1
+(e) │   └── z1
+(f) │   └── x2
+(g) └── z4
+(h) └── w1
+```
+
+Exclude: a, b
+```bash
+$ asar pack app app.asar --unpack-dir "{x1,x2}"
+```
+
+Exclude: a, b, d, f
+```bash
+$ asar pack app app.asar --unpack-dir "**/{x1,x2}"
+```
+
+Exclude: a, b, d, f, h
+```bash
+$ asar pack app app.asar --unpack-dir "{**/x1,**/x2,z4/w1}"
+```
+
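+The same exclusions can also be expressed programmatically through the
+`unpackDir` option of `createPackageWithOptions` (see the next section); a
+minimal sketch, using the same glob expression as the CLI flag:
+
+```javascript
+const asar = require('asar');
+
+// Equivalent of: asar pack app app.asar --unpack-dir "**/{x1,x2}"
+await asar.createPackageWithOptions('app', 'app.asar', {
+  unpackDir: '**/{x1,x2}'
+});
+```
+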
+## Using programmatically
+
+### Example
+
+```javascript
+const asar = require('asar');
+
+const src = 'some/path/';
+const dest = 'name.asar';
+
+await asar.createPackage(src, dest);
+console.log('done.');
+```
+
+Please note that there is currently **no** error handling provided!
+
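+If you need to react to failures, wrap the call yourself; a minimal sketch
+(the returned promise simply rejects on any underlying error):
+
+```javascript
+const asar = require('asar');
+
+async function packSafely (src, dest) {
+  try {
+    await asar.createPackage(src, dest);
+  } catch (error) {
+    // e.g. src does not exist or dest is not writable
+    console.error('packing failed:', error.message);
+  }
+}
+```
+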
+### Transform
+You can pass in a `transform` option: a function that either returns nothing
+or a `stream.Transform`. The returned stream is applied to each file that ends
+up inside the `.asar` archive, transforming it on the way in (e.g. compressing it).
+
+```javascript
+const asar = require('asar');
+
+const src = 'some/path/';
+const dest = 'name.asar';
+
+function transform (filename) {
+ return new CustomTransformStream()
+}
+
+await asar.createPackageWithOptions(src, dest, { transform: transform });
+console.log('done.');
+```
+
+## Using with grunt
+
+There is also an unofficial grunt plugin to generate asar archives at [bwin/grunt-asar][grunt-asar].
+
+## Format
+
+Asar uses [Pickle][pickle] to safely serialize binary values to a file; there
+is also a [node.js binding][node-pickle] of the `Pickle` class.
+
+The format of asar is very flat:
+
+```
+| UInt32: header_size | String: header | Bytes: file1 | ... | Bytes: file42 |
+```
+
+The `header_size` and `header` are serialized with the [Pickle][pickle] class,
+and the `header_size` [Pickle][pickle] object is 8 bytes.
+
+The `header` is a JSON string, and the `header_size` is the size of `header`'s
+`Pickle` object.
+
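+As an illustration, here is a minimal sketch of reading the header using the
+`chromium-pickle-js` package (the binding this module itself depends on),
+assuming an archive named `app.asar`:
+
+```javascript
+const fs = require('fs');
+const pickle = require('chromium-pickle-js');
+
+const fd = fs.openSync('app.asar', 'r');
+
+// The first 8 bytes are a Pickle object wrapping the UInt32 header_size.
+const sizeBuf = Buffer.alloc(8);
+fs.readSync(fd, sizeBuf, 0, 8, 0);
+const headerSize = pickle.createFromBuffer(sizeBuf).createIterator().readUInt32();
+
+// The header Pickle follows; the string inside it is the JSON index.
+const headerBuf = Buffer.alloc(headerSize);
+fs.readSync(fd, headerBuf, 0, headerSize, 8);
+const header = JSON.parse(pickle.createFromBuffer(headerBuf).createIterator().readString());
+fs.closeSync(fd);
+```
+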
+The structure of `header` is something like this:
+
+```json
+{
+ "files": {
+ "tmp": {
+ "files": {}
+ },
+ "usr" : {
+ "files": {
+ "bin": {
+ "files": {
+ "ls": {
+ "offset": "0",
+ "size": 100,
+ "executable": true,
+ "integrity": {
+ "algorithm": "SHA256",
+ "hash": "...",
+ "blockSize": 1024,
+ "blocks": ["...", "..."]
+ }
+ },
+ "cd": {
+ "offset": "100",
+ "size": 100,
+ "executable": true,
+ "integrity": {
+ "algorithm": "SHA256",
+ "hash": "...",
+ "blockSize": 1024,
+ "blocks": ["...", "..."]
+ }
+ }
+ }
+ }
+ }
+ },
+ "etc": {
+ "files": {
+ "hosts": {
+ "offset": "200",
+ "size": 32,
+ "integrity": {
+ "algorithm": "SHA256",
+ "hash": "...",
+ "blockSize": 1024,
+ "blocks": ["...", "..."]
+ }
+ }
+ }
+ }
+ }
+}
+```
+
+`offset` and `size` record the information needed to read the file from the
+archive. `offset` starts from 0, so you have to add the size of `header_size`
+and `header` to `offset` to get the real offset of the file.
+
+`offset` is a UINT64 number represented as a string, because there is no way
+to precisely represent UINT64 in a JavaScript `Number`. `size` is a JavaScript
+`Number` that is no larger than `Number.MAX_SAFE_INTEGER`, which has a value of
+`9007199254740991` and is about 8PB in size. We didn't store `size` as UINT64
+because file size in Node.js is represented as a `Number`, and it is not safe
+to convert a `Number` to UINT64.
+
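+Continuing the header-reading sketch above, extracting `/usr/bin/ls` from the
+example header would look something like this (the `8` accounts for the
+pickled `header_size`):
+
+```javascript
+const file = header.files.usr.files.bin.files.ls;
+
+// offset is a UINT64 stored as a string; the file data starts after the
+// size Pickle (8 bytes) and the header Pickle (headerSize bytes).
+const realOffset = 8 + headerSize + Number(file.offset);
+
+const content = Buffer.alloc(file.size);
+const fd2 = fs.openSync('app.asar', 'r');
+fs.readSync(fd2, content, 0, file.size, realOffset);
+fs.closeSync(fd2);
+```
+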
+`integrity` is an object consisting of a few keys (a sketch of computing
+these values follows the list):
+* A hashing `algorithm`; currently only `SHA256` is supported.
+* A hex-encoded `hash` value representing the hash of the entire file.
+* An array of hex-encoded hashes for the `blocks` of the file, i.e. for a `blockSize` of 4KB this array contains the hash of every block if you split the file into N 4KB blocks.
+* An integer value `blockSize` representing the size in bytes of each block in the `blocks` hashes above.
+
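+A minimal sketch of producing these values for a file (this mirrors what
+`lib/integrity.js` below does with streams, but reads the whole file into
+memory for brevity):
+
+```javascript
+const crypto = require('crypto');
+const fs = require('fs');
+
+function fileIntegrity (filePath, blockSize = 4 * 1024 * 1024) {
+  const data = fs.readFileSync(filePath);
+  const blocks = [];
+  // Hash each blockSize-sized slice of the file individually.
+  for (let offset = 0; offset < data.length; offset += blockSize) {
+    const block = data.slice(offset, offset + blockSize);
+    blocks.push(crypto.createHash('SHA256').update(block).digest('hex'));
+  }
+  return {
+    algorithm: 'SHA256',
+    hash: crypto.createHash('SHA256').update(data).digest('hex'),
+    blockSize,
+    blocks
+  };
+}
+```
+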
+[pickle]: https://chromium.googlesource.com/chromium/src/+/master/base/pickle.h
+[node-pickle]: https://www.npmjs.org/package/chromium-pickle
+[grunt-asar]: https://github.com/bwin/grunt-asar
diff --git a/node_modules/asar/bin/asar.js b/node_modules/asar/bin/asar.js
new file mode 100755
index 0000000..8efcc1a
--- /dev/null
+++ b/node_modules/asar/bin/asar.js
@@ -0,0 +1,84 @@
+#!/usr/bin/env node
+
+var packageJSON = require('../package.json')
+var splitVersion = function (version) { return version.split('.').map(function (part) { return Number(part) }) }
+var requiredNodeVersion = splitVersion(packageJSON.engines.node.slice(2))
+var actualNodeVersion = splitVersion(process.versions.node)
+
+if (actualNodeVersion[0] < requiredNodeVersion[0] || (actualNodeVersion[0] === requiredNodeVersion[0] && actualNodeVersion[1] < requiredNodeVersion[1])) {
+ console.error('CANNOT RUN WITH NODE ' + process.versions.node)
+ console.error('asar requires Node ' + packageJSON.engines.node + '.')
+ process.exit(1)
+}
+
+// Not consts so that this file can load in Node < 4.0
+var asar = require('../lib/asar')
+var program = require('commander')
+
+program.version('v' + packageJSON.version)
+ .description('Manipulate asar archive files')
+
+program.command('pack <dir> <output>')
+ .alias('p')
+ .description('create asar archive')
+ .option('--ordering <file path>', 'path to a text file for ordering contents')
+ .option('--unpack <expression>', 'do not pack files matching glob <expression>')
+ .option('--unpack-dir <expression>', 'do not pack dirs matching glob <expression> or starting with literal <expression>')
+ .option('--exclude-hidden', 'exclude hidden files')
+  .action(function (dir, output, options) {
+    options = {
+      unpack: options.unpack,
+      unpackDir: options.unpackDir,
+      ordering: options.ordering,
+      dot: !options.excludeHidden
+    }
+    // createPackageWithOptions is promise-based and does not take a callback,
+    // so report failures through the rejected promise instead.
+    asar.createPackageWithOptions(dir, output, options).catch(function (error) {
+      console.error(error.stack)
+      process.exit(1)
+    })
+  })
+
+program.command('list <archive>')
+ .alias('l')
+ .description('list files of asar archive')
+  .option('-i, --is-pack', 'show whether each file in the asar is packed or unpacked')
+ .action(function (archive, options) {
+ options = {
+ isPack: options.isPack
+ }
+ var files = asar.listPackage(archive, options)
+ for (var i in files) {
+ console.log(files[i])
+ }
+ })
+
+program.command('extract-file <archive> <filename>')
+ .alias('ef')
+ .description('extract one file from archive')
+ .action(function (archive, filename) {
+ require('fs').writeFileSync(require('path').basename(filename),
+ asar.extractFile(archive, filename))
+ })
+
+program.command('extract <archive> <dest>')
+ .alias('e')
+ .description('extract archive')
+ .action(function (archive, dest) {
+ asar.extractAll(archive, dest)
+ })
+
+program.command('*')
+ .action(function (cmd) {
+ console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', cmd)
+ })
+
+program.parse(process.argv)
+
+if (program.args.length === 0) {
+ program.help()
+}
diff --git a/node_modules/asar/lib/asar.js b/node_modules/asar/lib/asar.js
new file mode 100644
index 0000000..050e1a7
--- /dev/null
+++ b/node_modules/asar/lib/asar.js
@@ -0,0 +1,219 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const path = require('path')
+const minimatch = require('minimatch')
+
+const Filesystem = require('./filesystem')
+const disk = require('./disk')
+const crawlFilesystem = require('./crawlfs')
+
+/**
+ * Whether a directory should be excluded from packing due to the `--unpack-dir` option.
+ *
+ * @param {string} dirPath - directory path to check
+ * @param {string} pattern - literal prefix [for backward compatibility] or glob pattern
+ * @param {array} unpackDirs - Array of directory paths previously marked as unpacked
+ */
+function isUnpackedDir (dirPath, pattern, unpackDirs) {
+ if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) {
+ if (!unpackDirs.includes(dirPath)) {
+ unpackDirs.push(dirPath)
+ }
+ return true
+ } else {
+ return unpackDirs.some(unpackDir => dirPath.startsWith(unpackDir))
+ }
+}
+
+module.exports.createPackage = async function (src, dest) {
+ return module.exports.createPackageWithOptions(src, dest, {})
+}
+
+module.exports.createPackageWithOptions = async function (src, dest, options) {
+ const globOptions = options.globOptions ? options.globOptions : {}
+ globOptions.dot = options.dot === undefined ? true : options.dot
+
+ const pattern = src + (options.pattern ? options.pattern : '/**/*')
+
+ const [filenames, metadata] = await crawlFilesystem(pattern, globOptions)
+ return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options)
+}
+
+/**
+ * Create an ASAR archive from a list of filenames.
+ *
+ * @param {string} src - Base path. All files are relative to this.
+ * @param {string} dest - Archive filename (& path).
+ * @param {array} filenames - List of filenames relative to src.
+ * @param {object} metadata - Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
+ * @param {object} options - Options passed to `createPackageWithOptions`.
+*/
+module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) {
+ if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
+ if (typeof options === 'undefined' || options === null) { options = {} }
+
+ src = path.normalize(src)
+ dest = path.normalize(dest)
+ filenames = filenames.map(function (filename) { return path.normalize(filename) })
+
+ const filesystem = new Filesystem(src)
+ const files = []
+ const unpackDirs = []
+
+ let filenamesSorted = []
+ if (options.ordering) {
+ const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => {
+ if (line.includes(':')) { line = line.split(':').pop() }
+ line = line.trim()
+ if (line.startsWith('/')) { line = line.slice(1) }
+ return line
+ })
+
+ const ordering = []
+ for (const file of orderingFiles) {
+ const pathComponents = file.split(path.sep)
+ let str = src
+ for (const pathComponent of pathComponents) {
+ str = path.join(str, pathComponent)
+ ordering.push(str)
+ }
+ }
+
+ let missing = 0
+ const total = filenames.length
+
+ for (const file of ordering) {
+ if (!filenamesSorted.includes(file) && filenames.includes(file)) {
+ filenamesSorted.push(file)
+ }
+ }
+
+ for (const file of filenames) {
+ if (!filenamesSorted.includes(file)) {
+ filenamesSorted.push(file)
+ missing += 1
+ }
+ }
+
+ console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`)
+ } else {
+ filenamesSorted = filenames
+ }
+
+ const handleFile = async function (filename) {
+ if (!metadata[filename]) {
+ metadata[filename] = await crawlFilesystem.determineFileType(filename)
+ }
+ const file = metadata[filename]
+
+ let shouldUnpack
+ switch (file.type) {
+ case 'directory':
+ if (options.unpackDir) {
+ shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs)
+ } else {
+ shouldUnpack = false
+ }
+ filesystem.insertDirectory(filename, shouldUnpack)
+ break
+ case 'file':
+ shouldUnpack = false
+ if (options.unpack) {
+ shouldUnpack = minimatch(filename, options.unpack, { matchBase: true })
+ }
+ if (!shouldUnpack && options.unpackDir) {
+ const dirName = path.relative(src, path.dirname(filename))
+ shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs)
+ }
+ files.push({ filename: filename, unpack: shouldUnpack })
+ return filesystem.insertFile(filename, shouldUnpack, file, options)
+ case 'link':
+ filesystem.insertLink(filename)
+ break
+ }
+ return Promise.resolve()
+ }
+
+ const insertsDone = async function () {
+ await fs.mkdirp(path.dirname(dest))
+ return disk.writeFilesystem(dest, filesystem, files, metadata)
+ }
+
+ const names = filenamesSorted.slice()
+
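+  // Process the files sequentially: insertFile assigns each file's offset
+  // from shared filesystem state, so ordering matters here.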
+ const next = async function (name) {
+ if (!name) { return insertsDone() }
+
+ await handleFile(name)
+ return next(names.shift())
+ }
+
+ return next(names.shift())
+}
+
+module.exports.statFile = function (archive, filename, followLinks) {
+ const filesystem = disk.readFilesystemSync(archive)
+ return filesystem.getFile(filename, followLinks)
+}
+
+module.exports.getRawHeader = function (archive) {
+ return disk.readArchiveHeaderSync(archive)
+}
+
+module.exports.listPackage = function (archive, options) {
+ return disk.readFilesystemSync(archive).listFiles(options)
+}
+
+module.exports.extractFile = function (archive, filename) {
+ const filesystem = disk.readFilesystemSync(archive)
+ return disk.readFileSync(filesystem, filename, filesystem.getFile(filename))
+}
+
+module.exports.extractAll = function (archive, dest) {
+ const filesystem = disk.readFilesystemSync(archive)
+ const filenames = filesystem.listFiles()
+
+ // under windows just extract links as regular files
+ const followLinks = process.platform === 'win32'
+
+ // create destination directory
+ fs.mkdirpSync(dest)
+
+ for (const fullPath of filenames) {
+ // Remove leading slash
+ const filename = fullPath.substr(1)
+ const destFilename = path.join(dest, filename)
+ const file = filesystem.getFile(filename, followLinks)
+ if (file.files) {
+ // it's a directory, create it and continue with the next entry
+ fs.mkdirpSync(destFilename)
+ } else if (file.link) {
+ // it's a symlink, create a symlink
+ const linkSrcPath = path.dirname(path.join(dest, file.link))
+ const linkDestPath = path.dirname(destFilename)
+ const relativePath = path.relative(linkDestPath, linkSrcPath)
+ // try to delete output file, because we can't overwrite a link
+ try {
+ fs.unlinkSync(destFilename)
+ } catch {}
+ const linkTo = path.join(relativePath, path.basename(file.link))
+ fs.symlinkSync(linkTo, destFilename)
+ } else {
+ // it's a file, extract it
+ const content = disk.readFileSync(filesystem, filename, file)
+ fs.writeFileSync(destFilename, content)
+ if (file.executable) {
+ fs.chmodSync(destFilename, '755')
+ }
+ }
+ }
+}
+
+module.exports.uncache = function (archive) {
+ return disk.uncacheFilesystem(archive)
+}
+
+module.exports.uncacheAll = function () {
+ disk.uncacheAll()
+}
diff --git a/node_modules/asar/lib/crawlfs.js b/node_modules/asar/lib/crawlfs.js
new file mode 100644
index 0000000..a26c3eb
--- /dev/null
+++ b/node_modules/asar/lib/crawlfs.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const { promisify } = require('util')
+
+const fs = require('./wrapped-fs')
+const glob = promisify(require('glob'))
+
+async function determineFileType (filename) {
+ const stat = await fs.lstat(filename)
+ if (stat.isFile()) {
+ return { type: 'file', stat }
+ } else if (stat.isDirectory()) {
+ return { type: 'directory', stat }
+ } else if (stat.isSymbolicLink()) {
+ return { type: 'link', stat }
+ }
+}
+
+module.exports = async function (dir, options) {
+ const metadata = {}
+ const crawled = await glob(dir, options)
+ const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)]))
+ const links = []
+ const filenames = results.map(([filename, type]) => {
+ if (type) {
+ metadata[filename] = type
+ if (type.type === 'link') links.push(filename)
+ }
+ return filename
+ }).filter((filename) => {
+    // Newer glob can return files inside symlinked directories; to avoid
+    // those appearing in archives we need to manually exclude them here.
+ const exactLinkIndex = links.findIndex(link => filename === link)
+ return links.every((link, index) => {
+ if (index === exactLinkIndex) return true
+ return !filename.startsWith(link)
+ })
+ })
+ return [filenames, metadata]
+}
+module.exports.determineFileType = determineFileType
diff --git a/node_modules/asar/lib/disk.js b/node_modules/asar/lib/disk.js
new file mode 100644
index 0000000..34569a4
--- /dev/null
+++ b/node_modules/asar/lib/disk.js
@@ -0,0 +1,123 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const path = require('path')
+const pickle = require('chromium-pickle-js')
+
+const Filesystem = require('./filesystem')
+let filesystemCache = {}
+
+async function copyFile (dest, src, filename) {
+ const srcFile = path.join(src, filename)
+ const targetFile = path.join(dest, filename)
+
+ const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))])
+ return fs.writeFile(targetFile, content, { mode: stats.mode })
+}
+
+async function streamTransformedFile (originalFilename, outStream, transformed) {
+ return new Promise((resolve, reject) => {
+ const stream = fs.createReadStream(transformed ? transformed.path : originalFilename)
+ stream.pipe(outStream, { end: false })
+ stream.on('error', reject)
+ stream.on('end', () => resolve())
+ })
+}
+
+const writeFileListToStream = async function (dest, filesystem, out, list, metadata) {
+ for (const file of list) {
+ if (file.unpack) { // the file should not be packed into archive
+ const filename = path.relative(filesystem.src, file.filename)
+ await copyFile(`${dest}.unpacked`, filesystem.src, filename)
+ } else {
+ await streamTransformedFile(file.filename, out, metadata[file.filename].transformed)
+ }
+ }
+ return out.end()
+}
+
+module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) {
+ const headerPickle = pickle.createEmpty()
+ headerPickle.writeString(JSON.stringify(filesystem.header))
+ const headerBuf = headerPickle.toBuffer()
+
+ const sizePickle = pickle.createEmpty()
+ sizePickle.writeUInt32(headerBuf.length)
+ const sizeBuf = sizePickle.toBuffer()
+
+ const out = fs.createWriteStream(dest)
+ await new Promise((resolve, reject) => {
+ out.on('error', reject)
+ out.write(sizeBuf)
+ return out.write(headerBuf, () => resolve())
+ })
+ return writeFileListToStream(dest, filesystem, out, files, metadata)
+}
+
+module.exports.readArchiveHeaderSync = function (archive) {
+ const fd = fs.openSync(archive, 'r')
+ let size
+ let headerBuf
+ try {
+ const sizeBuf = Buffer.alloc(8)
+ if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
+ throw new Error('Unable to read header size')
+ }
+
+ const sizePickle = pickle.createFromBuffer(sizeBuf)
+ size = sizePickle.createIterator().readUInt32()
+ headerBuf = Buffer.alloc(size)
+ if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
+ throw new Error('Unable to read header')
+ }
+ } finally {
+ fs.closeSync(fd)
+ }
+
+ const headerPickle = pickle.createFromBuffer(headerBuf)
+ const header = headerPickle.createIterator().readString()
+ return { headerString: header, header: JSON.parse(header), headerSize: size }
+}
+
+module.exports.readFilesystemSync = function (archive) {
+ if (!filesystemCache[archive]) {
+ const header = this.readArchiveHeaderSync(archive)
+ const filesystem = new Filesystem(archive)
+ filesystem.header = header.header
+ filesystem.headerSize = header.headerSize
+ filesystemCache[archive] = filesystem
+ }
+ return filesystemCache[archive]
+}
+
+module.exports.uncacheFilesystem = function (archive) {
+ if (filesystemCache[archive]) {
+ filesystemCache[archive] = undefined
+ return true
+ }
+ return false
+}
+
+module.exports.uncacheAll = function () {
+ filesystemCache = {}
+}
+
+module.exports.readFileSync = function (filesystem, filename, info) {
+  let buffer = Buffer.alloc(info.size)
+  // Node throws an exception when reading 0 bytes into a 0-size buffer,
+  // so we short-circuit the read in this case.
+  if (info.size <= 0) { return buffer }
+  if (info.unpacked) {
+    // it's an unpacked file, copy it from the `.unpacked` directory on disk.
+    buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
+  } else {
+ const fd = fs.openSync(filesystem.src, 'r')
+ try {
+ const offset = 8 + filesystem.headerSize + parseInt(info.offset)
+ fs.readSync(fd, buffer, 0, info.size, offset)
+ } finally {
+ fs.closeSync(fd)
+ }
+ }
+ return buffer
+}
diff --git a/node_modules/asar/lib/filesystem.js b/node_modules/asar/lib/filesystem.js
new file mode 100644
index 0000000..552055e
--- /dev/null
+++ b/node_modules/asar/lib/filesystem.js
@@ -0,0 +1,151 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const os = require('os')
+const path = require('path')
+const { promisify } = require('util')
+const stream = require('stream')
+const getFileIntegrity = require('./integrity')
+
+const UINT32_MAX = 2 ** 32 - 1
+
+const pipeline = promisify(stream.pipeline)
+
+class Filesystem {
+ constructor (src) {
+ this.src = path.resolve(src)
+ this.header = { files: {} }
+ this.offset = BigInt(0)
+ }
+
+ searchNodeFromDirectory (p) {
+ let json = this.header
+ const dirs = p.split(path.sep)
+ for (const dir of dirs) {
+ if (dir !== '.') {
+ json = json.files[dir]
+ }
+ }
+ return json
+ }
+
+ searchNodeFromPath (p) {
+ p = path.relative(this.src, p)
+ if (!p) { return this.header }
+ const name = path.basename(p)
+ const node = this.searchNodeFromDirectory(path.dirname(p))
+ if (node.files == null) {
+ node.files = {}
+ }
+ if (node.files[name] == null) {
+ node.files[name] = {}
+ }
+ return node.files[name]
+ }
+
+ insertDirectory (p, shouldUnpack) {
+ const node = this.searchNodeFromPath(p)
+ if (shouldUnpack) {
+ node.unpacked = shouldUnpack
+ }
+ node.files = {}
+ return node.files
+ }
+
+ async insertFile (p, shouldUnpack, file, options) {
+ const dirNode = this.searchNodeFromPath(path.dirname(p))
+ const node = this.searchNodeFromPath(p)
+ if (shouldUnpack || dirNode.unpacked) {
+ node.size = file.stat.size
+ node.unpacked = true
+ node.integrity = await getFileIntegrity(p)
+ return Promise.resolve()
+ }
+
+ let size
+
+ const transformed = options.transform && options.transform(p)
+ if (transformed) {
+ const tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'asar-'))
+ const tmpfile = path.join(tmpdir, path.basename(p))
+ const out = fs.createWriteStream(tmpfile)
+ const readStream = fs.createReadStream(p)
+
+ await pipeline(readStream, transformed, out)
+ file.transformed = {
+ path: tmpfile,
+ stat: await fs.lstat(tmpfile)
+ }
+ size = file.transformed.stat.size
+ } else {
+ size = file.stat.size
+ }
+
+  // JavaScript cannot precisely represent integers >= UINT32_MAX.
+ if (size > UINT32_MAX) {
+ throw new Error(`${p}: file size can not be larger than 4.2GB`)
+ }
+
+ node.size = size
+ node.offset = this.offset.toString()
+ node.integrity = await getFileIntegrity(p)
+ if (process.platform !== 'win32' && (file.stat.mode & 0o100)) {
+ node.executable = true
+ }
+ this.offset += BigInt(size)
+ }
+
+ insertLink (p) {
+ const link = path.relative(fs.realpathSync(this.src), fs.realpathSync(p))
+ if (link.substr(0, 2) === '..') {
+ throw new Error(`${p}: file "${link}" links out of the package`)
+ }
+ const node = this.searchNodeFromPath(p)
+ node.link = link
+ return link
+ }
+
+ listFiles (options) {
+ const files = []
+
+ const fillFilesFromMetadata = function (basePath, metadata) {
+ if (!metadata.files) {
+ return
+ }
+
+ for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
+ const fullPath = path.join(basePath, childPath)
+ const packState = childMetadata.unpacked ? 'unpack' : 'pack '
+ files.push((options && options.isPack) ? `${packState} : ${fullPath}` : fullPath)
+ fillFilesFromMetadata(fullPath, childMetadata)
+ }
+ }
+
+ fillFilesFromMetadata('/', this.header)
+ return files
+ }
+
+ getNode (p) {
+ const node = this.searchNodeFromDirectory(path.dirname(p))
+ const name = path.basename(p)
+ if (name) {
+ return node.files[name]
+ } else {
+ return node
+ }
+ }
+
+ getFile (p, followLinks) {
+ followLinks = typeof followLinks === 'undefined' ? true : followLinks
+ const info = this.getNode(p)
+
+ // if followLinks is false we don't resolve symlinks
+ if (info.link && followLinks) {
+ return this.getFile(info.link)
+ } else {
+ return info
+ }
+ }
+}
+
+module.exports = Filesystem
diff --git a/node_modules/asar/lib/index.d.ts b/node_modules/asar/lib/index.d.ts
new file mode 100644
index 0000000..b3790ec
--- /dev/null
+++ b/node_modules/asar/lib/index.d.ts
@@ -0,0 +1,90 @@
+import { IOptions as GlobOptions } from 'glob';
+import { Stats } from 'fs';
+
+export type CreateOptions = {
+ dot?: boolean;
+ globOptions?: GlobOptions;
+ ordering?: string;
+ pattern?: string;
+ transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
+ unpack?: string;
+ unpackDir?: string;
+};
+
+export type ListOptions = {
+ isPack: boolean;
+};
+
+export type EntryMetadata = {
+ unpacked: boolean;
+};
+
+export type DirectoryMetadata = EntryMetadata & {
+ files: { [property: string]: EntryMetadata };
+};
+
+export type FileMetadata = EntryMetadata & {
+ executable?: true;
+ offset?: number;
+ size?: number;
+};
+
+export type LinkMetadata = {
+ link: string;
+};
+
+export type Metadata = DirectoryMetadata | FileMetadata | LinkMetadata;
+
+export type InputMetadataType = 'directory' | 'file' | 'link';
+
+export type InputMetadata = {
+ [property: string]: {
+ type: InputMetadataType;
+ stat: Stats;
+ }
+};
+
+export type DirectoryRecord = {
+ files: Record<string, DirectoryRecord | FileRecord>;
+};
+
+export type FileRecord = {
+ offset: string;
+ size: number;
+ executable?: boolean;
+ integrity: {
+ hash: string;
+ algorithm: 'SHA256';
+ blocks: string[];
+ blockSize: number;
+ };
+}
+
+export type ArchiveHeader = {
+ // The JSON parsed header string
+ header: DirectoryRecord;
+ headerString: string;
+ headerSize: number;
+}
+
+export function createPackage(src: string, dest: string): Promise<void>;
+export function createPackageWithOptions(
+ src: string,
+ dest: string,
+ options: CreateOptions
+): Promise<void>;
+export function createPackageFromFiles(
+ src: string,
+ dest: string,
+ filenames: string[],
+ metadata?: InputMetadata,
+ options?: CreateOptions
+): Promise<void>;
+
+export function statFile(archive: string, filename: string, followLinks?: boolean): Metadata;
+export function getRawHeader(archive: string): ArchiveHeader;
+export function listPackage(archive: string, options?: ListOptions): string[];
+export function extractFile(archive: string, filename: string): Buffer;
+export function extractAll(archive: string, dest: string): void;
+export function uncache(archive: string): boolean;
+export function uncacheAll(): void;
diff --git a/node_modules/asar/lib/integrity.js b/node_modules/asar/lib/integrity.js
new file mode 100644
index 0000000..6fabee4
--- /dev/null
+++ b/node_modules/asar/lib/integrity.js
@@ -0,0 +1,62 @@
+const crypto = require('crypto')
+const fs = require('fs')
+const stream = require('stream')
+const { promisify } = require('util')
+
+const ALGORITHM = 'SHA256'
+// 4MB default block size
+const BLOCK_SIZE = 4 * 1024 * 1024
+
+const pipeline = promisify(stream.pipeline)
+
+function hashBlock (block) {
+ return crypto.createHash(ALGORITHM).update(block).digest('hex')
+}
+
+async function getFileIntegrity (path) {
+ const fileHash = crypto.createHash(ALGORITHM)
+
+ const blocks = []
+ let currentBlockSize = 0
+ let currentBlock = []
+
+ await pipeline(
+ fs.createReadStream(path),
+ new stream.PassThrough({
+ decodeStrings: false,
+ transform (_chunk, encoding, callback) {
+ fileHash.update(_chunk)
+
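+        // Slice the incoming chunk into BLOCK_SIZE-sized blocks: hash and
+        // reset the current block whenever it fills, carrying the remainder.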
+ function handleChunk (chunk) {
+ const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength)
+ currentBlockSize += diffToSlice
+ currentBlock.push(chunk.slice(0, diffToSlice))
+ if (currentBlockSize === BLOCK_SIZE) {
+ blocks.push(hashBlock(Buffer.concat(currentBlock)))
+ currentBlock = []
+ currentBlockSize = 0
+ }
+ if (diffToSlice < chunk.byteLength) {
+ handleChunk(chunk.slice(diffToSlice))
+ }
+ }
+ handleChunk(_chunk)
+ callback()
+ },
+ flush (callback) {
+ blocks.push(hashBlock(Buffer.concat(currentBlock)))
+ currentBlock = []
+ callback()
+ }
+ })
+ )
+
+ return {
+ algorithm: ALGORITHM,
+ hash: fileHash.digest('hex'),
+ blockSize: BLOCK_SIZE,
+ blocks: blocks
+ }
+}
+
+module.exports = getFileIntegrity
diff --git a/node_modules/asar/lib/wrapped-fs.js b/node_modules/asar/lib/wrapped-fs.js
new file mode 100644
index 0000000..24f59d0
--- /dev/null
+++ b/node_modules/asar/lib/wrapped-fs.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const fs = process.versions.electron ? require('original-fs') : require('fs')
+
+const promisifiedMethods = [
+ 'lstat',
+ 'mkdtemp',
+ 'readFile',
+ 'stat',
+ 'writeFile'
+]
+
+const promisified = {}
+
+for (const method of Object.keys(fs)) {
+ if (promisifiedMethods.includes(method)) {
+ promisified[method] = fs.promises[method]
+ } else {
+ promisified[method] = fs[method]
+ }
+}
+// To make it more like fs-extra
+promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true })
+promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true })
+
+module.exports = promisified
diff --git a/node_modules/asar/package.json b/node_modules/asar/package.json
new file mode 100644
index 0000000..0ad7009
--- /dev/null
+++ b/node_modules/asar/package.json
@@ -0,0 +1,67 @@
+{
+ "name": "asar",
+ "description": "Creating Electron app packages",
+ "version": "3.2.0",
+ "main": "./lib/asar.js",
+ "types": "./lib/index.d.ts",
+ "bin": {
+ "asar": "./bin/asar.js"
+ },
+ "files": [
+ "bin",
+ "lib",
+ "lib/index.d.ts"
+ ],
+ "engines": {
+ "node": ">=10.12.0"
+ },
+ "license": "MIT",
+ "homepage": "https://github.com/electron/asar",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/electron/asar.git"
+ },
+ "bugs": {
+ "url": "https://github.com/electron/asar/issues"
+ },
+ "scripts": {
+ "mocha": "xvfb-maybe electron-mocha --reporter spec && mocha --reporter spec",
+ "test": "npm run lint && npm run mocha",
+ "lint": "tsd && standard",
+ "standard": "standard",
+ "tsd": "tsd"
+ },
+ "standard": {
+ "env": {
+ "mocha": true
+ },
+ "globals": [
+ "BigInt"
+ ]
+ },
+ "tsd": {
+ "directory": "test"
+ },
+ "dependencies": {
+ "chromium-pickle-js": "^0.2.0",
+ "commander": "^5.0.0",
+ "glob": "^7.1.6",
+ "minimatch": "^3.0.4"
+ },
+ "optionalDependencies": {
+ "@types/glob": "^7.1.1"
+ },
+ "devDependencies": {
+ "@continuous-auth/semantic-release-npm": "^2.0.0",
+ "@semantic-release/changelog": "^5.0.0",
+ "electron": "^5.0.0",
+ "electron-mocha": "^8.2.1",
+ "lodash": "^4.17.15",
+ "mocha": "^7.1.1",
+ "rimraf": "^3.0.2",
+ "semantic-release": "^17.0.4",
+ "standard": "^14.3.3",
+ "tsd": "^0.11.0",
+ "xvfb-maybe": "^0.2.1"
+ }
+}