main
郭建 1 year ago
parent 9fef185884
commit 316c573fc9

Binary file not shown.

@ -2,8 +2,8 @@ const { app, BrowserWindow } = require('electron')
const createWindow = () => {
  const win = new BrowserWindow({
    frame: false,
    fullscreen: true,
    frame: false, // hide the window frame (title bar and controls)
    fullscreen: true, // launch in fullscreen
    // icon: path.join(__dirname, '/bb.png'), // set the application icon here
  })
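For reference, a minimal sketch of a complete `main.js` built around the options this commit touches (the `index.html` entry page and the `whenReady`/`window-all-closed` bootstrap are assumptions, not shown in this diff):

```javascript
// Minimal sketch only: it shows the frameless, fullscreen window this commit
// configures, not the repository's full main-process code.
const { app, BrowserWindow } = require('electron')

const createWindow = () => {
  const win = new BrowserWindow({
    frame: false,     // hide the native frame (title bar and window controls)
    fullscreen: true  // start in fullscreen
  })
  win.loadFile('index.html') // assumed entry page
}

app.whenReady().then(createWindow)

app.on('window-all-closed', () => {
  // Quit when all windows are closed, except on macOS.
  if (process.platform !== 'darwin') app.quit()
})
```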

1
node_modules/.bin/asar generated vendored

@ -1 +0,0 @@
../asar/bin/asar.js

1
node_modules/.bin/color-support generated vendored

@ -1 +0,0 @@
../color-support/bin.js

1
node_modules/.bin/electron generated vendored

@ -1 +0,0 @@
../electron/cli.js

1
node_modules/.bin/electron-forge generated vendored

@ -1 +0,0 @@
../@electron-forge/cli/dist/electron-forge.js

@ -1 +0,0 @@
../@electron-forge/cli/script/vscode.sh

@ -1,12 +0,0 @@
@SETLOCAL
@IF NOT DEFINED NODE_PATH (
@SET "NODE_PATH=%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
) ELSE (
@SET "NODE_PATH=%NODE_PATH%;%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
)
@IF EXIST "%~dp0\bash.exe" (
"%~dp0\bash.exe" "%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge\cli\script\vscode.sh" %*
) ELSE (
@SET PATHEXT=%PATHEXT:;.JS;=;%
bash "%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge\cli\script\vscode.sh" %*
)

@ -1,41 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
$pathsep=":"
$env_node_path=$env:NODE_PATH
$new_node_path="$basedir\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
$pathsep=";"
} else {
$new_node_path="$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge"
}
if ([string]::IsNullOrEmpty($env_node_path)) {
$env:NODE_PATH=$new_node_path
} else {
$env:NODE_PATH="$env_node_path$pathsep$new_node_path"
}
$ret=0
if (Test-Path "$basedir/bash$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/bash$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/script/vscode.sh" $args
} else {
& "$basedir/bash$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/script/vscode.sh" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "bash$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/script/vscode.sh" $args
} else {
& "bash$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/script/vscode.sh" $args
}
$ret=$LASTEXITCODE
}
$env:NODE_PATH=$env_node_path
exit $ret

@ -1 +0,0 @@
../@electron-forge/cli/script/vscode.cmd

@ -1,12 +0,0 @@
@SETLOCAL
@IF NOT DEFINED NODE_PATH (
@SET "NODE_PATH=%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
) ELSE (
@SET "NODE_PATH=%NODE_PATH%;%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
)
@IF EXIST "%~dp0\cmd.exe" (
"%~dp0\cmd.exe" "%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge\cli\script\vscode.cmd" %*
) ELSE (
@SET PATHEXT=%PATHEXT:;.JS;=;%
cmd "%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge\cli\script\vscode.cmd" %*
)

@ -1,41 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
$pathsep=":"
$env_node_path=$env:NODE_PATH
$new_node_path="$basedir\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
$pathsep=";"
} else {
$new_node_path="$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge"
}
if ([string]::IsNullOrEmpty($env_node_path)) {
$env:NODE_PATH=$new_node_path
} else {
$env:NODE_PATH="$env_node_path$pathsep$new_node_path"
}
$ret=0
if (Test-Path "$basedir/cmd$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/cmd$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/script/vscode.cmd" $args
} else {
& "$basedir/cmd$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/script/vscode.cmd" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "cmd$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/script/vscode.cmd" $args
} else {
& "cmd$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/script/vscode.cmd" $args
}
$ret=$LASTEXITCODE
}
$env:NODE_PATH=$env_node_path
exit $ret

@ -1,12 +0,0 @@
@SETLOCAL
@IF NOT DEFINED NODE_PATH (
@SET "NODE_PATH=%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
) ELSE (
@SET "NODE_PATH=%NODE_PATH%;%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
)
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge\cli\dist\electron-forge.js" %*
) ELSE (
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge\cli\dist\electron-forge.js" %*
)

@ -1,41 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
$pathsep=":"
$env_node_path=$env:NODE_PATH
$new_node_path="$basedir\..\.store\@electron-forge+cli@6.4.2\node_modules\@electron-forge"
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
$pathsep=";"
} else {
$new_node_path="$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge"
}
if ([string]::IsNullOrEmpty($env_node_path)) {
$env:NODE_PATH=$new_node_path
} else {
$env:NODE_PATH="$env_node_path$pathsep$new_node_path"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/dist/electron-forge.js" $args
} else {
& "$basedir/node$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/dist/electron-forge.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/dist/electron-forge.js" $args
} else {
& "node$exe" "$basedir/../.store/@electron-forge+cli@6.4.2/node_modules/@electron-forge/cli/dist/electron-forge.js" $args
}
$ret=$LASTEXITCODE
}
$env:NODE_PATH=$env_node_path
exit $ret

@ -1 +0,0 @@
../electron-installer-debian/src/cli.js

@ -1 +0,0 @@
../electron-installer-redhat/src/cli.js

@ -1 +0,0 @@
../@electron/osx-sign/bin/electron-osx-flat.js

@ -1 +0,0 @@
../@electron/osx-sign/bin/electron-osx-sign.js

@ -1 +0,0 @@
../electron-packager/bin/electron-packager.js

@ -1 +0,0 @@
../@electron/rebuild/lib/cli.js

41
node_modules/.bin/electron.ps1 generated vendored

@ -1,41 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
$pathsep=":"
$env_node_path=$env:NODE_PATH
$new_node_path="$basedir\..\.store\electron@27.0.0\node_modules"
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
$pathsep=";"
} else {
$new_node_path="$basedir/../.store/electron@27.0.0/node_modules"
}
if ([string]::IsNullOrEmpty($env_node_path)) {
$env:NODE_PATH=$new_node_path
} else {
$env:NODE_PATH="$env_node_path$pathsep$new_node_path"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../.store/electron@27.0.0/node_modules/electron/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../.store/electron@27.0.0/node_modules/electron/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../.store/electron@27.0.0/node_modules/electron/cli.js" $args
} else {
& "node$exe" "$basedir/../.store/electron@27.0.0/node_modules/electron/cli.js" $args
}
$ret=$LASTEXITCODE
}
$env:NODE_PATH=$env_node_path
exit $ret

1
node_modules/.bin/extract-zip generated vendored

@ -1 +0,0 @@
../extract-zip/cli.js

@ -1 +0,0 @@
../get-folder-size/bin/get-folder-size

1
node_modules/.bin/is-docker generated vendored

@ -1 +0,0 @@
../is-docker/cli.js

1
node_modules/.bin/mkdirp generated vendored

@ -1 +0,0 @@
../mkdirp/bin/cmd.js

1
node_modules/.bin/node-gyp generated vendored

@ -1 +0,0 @@
../node-gyp/bin/node-gyp.js

1
node_modules/.bin/node-which generated vendored

@ -1 +0,0 @@
../which/bin/node-which

1
node_modules/.bin/nopt generated vendored

@ -1 +0,0 @@
../nopt/bin/nopt.js

1
node_modules/.bin/resolve generated vendored

@ -1 +0,0 @@
../resolve/bin/resolve

1
node_modules/.bin/rimraf generated vendored

@ -1 +0,0 @@
../rimraf/bin.js

1
node_modules/.bin/semver generated vendored

@ -1 +0,0 @@
../semver/bin/semver.js

1
node_modules/.bin/yarn-or-npm generated vendored

@ -1 +0,0 @@
../yarn-or-npm/bin/index.js

1
node_modules/.bin/yon generated vendored

@ -1 +0,0 @@
../yarn-or-npm/bin/index.js

5029
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large.

@ -1,6 +0,0 @@
Recently updated (since 2023-10-04)
Today:
→ @electron-forge/cli@6.4.2 @electron-forge/shared-types@6.4.2 @electron/rebuild@3.3.0 node-abi@^3.45.0(3.49.0) (07:07:54)
→ @electron-forge/cli@6.4.2 @electron-forge/shared-types@6.4.2 electron-packager@17.1.2 resolve@^1.1.6(1.22.8) (05:37:14)
2023-10-06
→ @electron-forge/cli@6.4.2 @electron-forge/shared-types@6.4.2 electron-packager@17.1.2 get-package-info@1.0.0 read-pkg-up@2.0.0 read-pkg@2.0.0 normalize-package-data@2.5.0 validate-npm-package-license@3.0.4 spdx-correct@3.2.0 spdx-license-ids@^3.0.0(3.0.16) (00:57:49)

@ -1,20 +0,0 @@
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -1,213 +0,0 @@
# @electron/asar - Electron Archive
[![CircleCI build status](https://circleci.com/gh/electron/asar/tree/main.svg?style=shield)](https://circleci.com/gh/electron/asar/tree/main)
[![npm version](http://img.shields.io/npm/v/@electron/asar.svg)](https://npmjs.org/package/@electron/asar)
Asar is a simple, extensible archive format: it works like `tar`, concatenating
all files together without compression, while still supporting random access.
## Features
* Support random access
* Use JSON to store files' information
* Very easy to write a parser
## Command line utility
### Install
This module requires Node 10 or later.
```bash
$ npm install --engine-strict @electron/asar
```
### Usage
```bash
$ asar --help
Usage: asar [options] [command]
Commands:
pack|p <dir> <output>
create asar archive
list|l <archive>
list files of asar archive
extract-file|ef <archive> <filename>
extract one file from archive
extract|e <archive> <dest>
extract archive
Options:
-h, --help output usage information
-V, --version output the version number
```
#### Excluding multiple resources from being packed
Given:
```
    app
(a) ├── x1
(b) ├── x2
(c) ├── y3
(d) │   ├── x1
(e) │   └── z1
(f) │       └── x2
(g) └── z4
(h)     └── w1
```
Exclude: a, b
```bash
$ asar pack app app.asar --unpack-dir "{x1,x2}"
```
Exclude: a, b, d, f
```bash
$ asar pack app app.asar --unpack-dir "**/{x1,x2}"
```
Exclude: a, b, d, f, h
```bash
$ asar pack app app.asar --unpack-dir "{**/x1,**/x2,z4/w1}"
```
## Using programmatically
### Example
```javascript
const asar = require('@electron/asar');
const src = 'some/path/';
const dest = 'name.asar';
await asar.createPackage(src, dest);
console.log('done.');
```
Please note that there is currently **no** error handling provided!
### Transform
You can pass in a `transform` option: a function that either returns nothing or
returns a `stream.Transform`. When a stream is returned, it is applied to a file's
contents before they are written into the `.asar` archive (e.g. to compress them).
```javascript
const asar = require('@electron/asar');
const src = 'some/path/';
const dest = 'name.asar';
function transform (filename) {
return new CustomTransformStream()
}
await asar.createPackageWithOptions(src, dest, { transform: transform });
console.log('done.');
```
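The `CustomTransformStream` above is left abstract; as one hypothetical concrete choice, any `stream.Transform` will do, for example Node's built-in `zlib` gzip stream applied only to `.js` files:

```javascript
const zlib = require('zlib');
const asar = require('@electron/asar');

// Hypothetical transform: gzip JavaScript files as they are packed.
// Files for which nothing is returned are copied into the archive unchanged.
function transform (filename) {
  if (filename.endsWith('.js')) {
    return zlib.createGzip(); // a stream.Transform
  }
}

await asar.createPackageWithOptions('some/path/', 'name.asar', { transform });
```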
## Using with grunt
There is also an unofficial grunt plugin to generate asar archives at [bwin/grunt-asar][grunt-asar].
## Format
Asar uses [Pickle][pickle] to safely serialize binary value to file.
The format of asar is very flat:
```
| UInt32: header_size | String: header | Bytes: file1 | ... | Bytes: file42 |
```
The `header_size` and `header` are serialized with the [Pickle][pickle] class;
the `header_size` [Pickle][pickle] object is 8 bytes.
The `header` is a JSON string, and `header_size` is the size of `header`'s
`Pickle` object.
Structure of `header` is something like this:
```json
{
  "files": {
    "tmp": {
      "files": {}
    },
    "usr": {
      "files": {
        "bin": {
          "files": {
            "ls": {
              "offset": "0",
              "size": 100,
              "executable": true,
              "integrity": {
                "algorithm": "SHA256",
                "hash": "...",
                "blockSize": 1024,
                "blocks": ["...", "..."]
              }
            },
            "cd": {
              "offset": "100",
              "size": 100,
              "executable": true,
              "integrity": {
                "algorithm": "SHA256",
                "hash": "...",
                "blockSize": 1024,
                "blocks": ["...", "..."]
              }
            }
          }
        }
      }
    },
    "etc": {
      "files": {
        "hosts": {
          "offset": "200",
          "size": 32,
          "integrity": {
            "algorithm": "SHA256",
            "hash": "...",
            "blockSize": 1024,
            "blocks": ["...", "..."]
          }
        }
      }
    }
  }
}
```
`offset` and `size` record where to read the file from within the archive.
`offset` starts at 0, so you have to manually add the sizes of `header_size` and
`header` to `offset` to get the file's real offset.
`offset` is a UINT64 number represented as a string, because there is no way to
precisely represent UINT64 in a JavaScript `Number`. `size` is a JavaScript
`Number` that is no larger than `Number.MAX_SAFE_INTEGER`, which has a value of
`9007199254740991` and is about 8PB in size. `size` is not stored as UINT64
because file sizes in Node.js are represented as `Number` and it is not safe to
convert `Number` to UINT64.
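As a sketch of that offset arithmetic (the `app.asar` path is illustrative; `getRawHeader` is part of the module's exported API, and the `etc/hosts` entry comes from the example header above):

```javascript
const fs = require('fs');
const asar = require('@electron/asar');

const archive = 'app.asar'; // illustrative archive path
const { header, headerSize } = asar.getRawHeader(archive);

// Entry taken from the example header above: /etc/hosts
const entry = header.files.etc.files.hosts;

// 8 bytes for the Pickle-framed header_size, then the header itself,
// then the file's offset relative to the start of the data section.
const start = 8 + headerSize + parseInt(entry.offset, 10);

const buffer = Buffer.alloc(entry.size);
const fd = fs.openSync(archive, 'r');
fs.readSync(fd, buffer, 0, entry.size, start);
fs.closeSync(fd);
```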
`integrity` is an object consisting of a few keys:
* A hashing `algorithm`, currently only `SHA256` is supported.
* A hex encoded `hash` value representing the hash of the entire file.
* An array of hex encoded hashes for the `blocks` of the file, i.e. for a `blockSize` of 4KB this array contains the hash of every block obtained by splitting the file into N 4KB blocks.
* An integer value `blockSize` representing the size in bytes of each block in the `blocks` hashes above.
[pickle]: https://chromium.googlesource.com/chromium/src/+/main/base/pickle.h
[grunt-asar]: https://github.com/bwin/grunt-asar

@ -1,82 +0,0 @@
#!/usr/bin/env node
var packageJSON = require('../package.json')
var splitVersion = function (version) { return version.split('.').map(function (part) { return Number(part) }) }
var requiredNodeVersion = splitVersion(packageJSON.engines.node.slice(2))
var actualNodeVersion = splitVersion(process.versions.node)
if (actualNodeVersion[0] < requiredNodeVersion[0] || (actualNodeVersion[0] === requiredNodeVersion[0] && actualNodeVersion[1] < requiredNodeVersion[1])) {
console.error('CANNOT RUN WITH NODE ' + process.versions.node)
console.error('asar requires Node ' + packageJSON.engines.node + '.')
process.exit(1)
}
// Not consts so that this file can load in Node < 4.0
var asar = require('../lib/asar')
var program = require('commander')
program.version('v' + packageJSON.version)
.description('Manipulate asar archive files')
program.command('pack <dir> <output>')
.alias('p')
.description('create asar archive')
.option('--ordering <file path>', 'path to a text file for ordering contents')
.option('--unpack <expression>', 'do not pack files matching glob <expression>')
.option('--unpack-dir <expression>', 'do not pack dirs matching glob <expression> or starting with literal <expression>')
.option('--exclude-hidden', 'exclude hidden files')
.action(function (dir, output, options) {
options = {
unpack: options.unpack,
unpackDir: options.unpackDir,
ordering: options.ordering,
version: options.sv,
arch: options.sa,
builddir: options.sb,
dot: !options.excludeHidden
}
asar.createPackageWithOptions(dir, output, options).catch(error => {
console.error(error)
process.exit(1)
})
})
program.command('list <archive>')
.alias('l')
.description('list files of asar archive')
.option('-i, --is-pack', 'each file in the asar is pack or unpack')
.action(function (archive, options) {
options = {
isPack: options.isPack
}
var files = asar.listPackage(archive, options)
for (var i in files) {
console.log(files[i])
}
})
program.command('extract-file <archive> <filename>')
.alias('ef')
.description('extract one file from archive')
.action(function (archive, filename) {
require('fs').writeFileSync(require('path').basename(filename),
asar.extractFile(archive, filename))
})
program.command('extract <archive> <dest>')
.alias('e')
.description('extract archive')
.action(function (archive, dest) {
asar.extractAll(archive, dest)
})
program.command('*')
.action(function (_cmd, args) {
console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', args[0])
})
program.parse(process.argv)
if (program.args.length === 0) {
program.help()
}

@ -1,229 +0,0 @@
'use strict'
const fs = require('./wrapped-fs')
const path = require('path')
const minimatch = require('minimatch')
const Filesystem = require('./filesystem')
const disk = require('./disk')
const crawlFilesystem = require('./crawlfs')
/**
* Whether a directory should be excluded from packing due to the `--unpack-dir` option.
*
* @param {string} dirPath - directory path to check
* @param {string} pattern - literal prefix [for backward compatibility] or glob pattern
* @param {array} unpackDirs - Array of directory paths previously marked as unpacked
*/
function isUnpackedDir (dirPath, pattern, unpackDirs) {
if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) {
if (!unpackDirs.includes(dirPath)) {
unpackDirs.push(dirPath)
}
return true
} else {
return unpackDirs.some(unpackDir => dirPath.startsWith(unpackDir))
}
}
module.exports.createPackage = async function (src, dest) {
return module.exports.createPackageWithOptions(src, dest, {})
}
module.exports.createPackageWithOptions = async function (src, dest, options) {
const globOptions = options.globOptions ? options.globOptions : {}
globOptions.dot = options.dot === undefined ? true : options.dot
const pattern = src + (options.pattern ? options.pattern : '/**/*')
const [filenames, metadata] = await crawlFilesystem(pattern, globOptions)
return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options)
}
/**
* Create an ASAR archive from a list of filenames.
*
* @param {string} src: Base path. All files are relative to this.
* @param {string} dest: Archive filename (& path).
* @param {array} filenames: List of filenames relative to src.
* @param {object} metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
* @param {object} options: Options passed to `createPackageWithOptions`.
*/
module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) {
if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
if (typeof options === 'undefined' || options === null) { options = {} }
src = path.normalize(src)
dest = path.normalize(dest)
filenames = filenames.map(function (filename) { return path.normalize(filename) })
const filesystem = new Filesystem(src)
const files = []
const unpackDirs = []
let filenamesSorted = []
if (options.ordering) {
const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => {
if (line.includes(':')) { line = line.split(':').pop() }
line = line.trim()
if (line.startsWith('/')) { line = line.slice(1) }
return line
})
const ordering = []
for (const file of orderingFiles) {
const pathComponents = file.split(path.sep)
let str = src
for (const pathComponent of pathComponents) {
str = path.join(str, pathComponent)
ordering.push(str)
}
}
let missing = 0
const total = filenames.length
for (const file of ordering) {
if (!filenamesSorted.includes(file) && filenames.includes(file)) {
filenamesSorted.push(file)
}
}
for (const file of filenames) {
if (!filenamesSorted.includes(file)) {
filenamesSorted.push(file)
missing += 1
}
}
console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`)
} else {
filenamesSorted = filenames
}
const handleFile = async function (filename) {
if (!metadata[filename]) {
metadata[filename] = await crawlFilesystem.determineFileType(filename)
}
const file = metadata[filename]
let shouldUnpack
switch (file.type) {
case 'directory':
if (options.unpackDir) {
shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs)
} else {
shouldUnpack = false
}
filesystem.insertDirectory(filename, shouldUnpack)
break
case 'file':
shouldUnpack = false
if (options.unpack) {
shouldUnpack = minimatch(filename, options.unpack, { matchBase: true })
}
if (!shouldUnpack && options.unpackDir) {
const dirName = path.relative(src, path.dirname(filename))
shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs)
}
files.push({ filename: filename, unpack: shouldUnpack })
return filesystem.insertFile(filename, shouldUnpack, file, options)
case 'link':
filesystem.insertLink(filename)
break
}
return Promise.resolve()
}
const insertsDone = async function () {
await fs.mkdirp(path.dirname(dest))
return disk.writeFilesystem(dest, filesystem, files, metadata)
}
const names = filenamesSorted.slice()
const next = async function (name) {
if (!name) { return insertsDone() }
await handleFile(name)
return next(names.shift())
}
return next(names.shift())
}
module.exports.statFile = function (archive, filename, followLinks) {
const filesystem = disk.readFilesystemSync(archive)
return filesystem.getFile(filename, followLinks)
}
module.exports.getRawHeader = function (archive) {
return disk.readArchiveHeaderSync(archive)
}
module.exports.listPackage = function (archive, options) {
return disk.readFilesystemSync(archive).listFiles(options)
}
module.exports.extractFile = function (archive, filename) {
const filesystem = disk.readFilesystemSync(archive)
return disk.readFileSync(filesystem, filename, filesystem.getFile(filename))
}
module.exports.extractAll = function (archive, dest) {
const filesystem = disk.readFilesystemSync(archive)
const filenames = filesystem.listFiles()
// under windows just extract links as regular files
const followLinks = process.platform === 'win32'
// create destination directory
fs.mkdirpSync(dest)
const extractionErrors = []
for (const fullPath of filenames) {
// Remove leading slash
const filename = fullPath.substr(1)
const destFilename = path.join(dest, filename)
const file = filesystem.getFile(filename, followLinks)
if (file.files) {
// it's a directory, create it and continue with the next entry
fs.mkdirpSync(destFilename)
} else if (file.link) {
// it's a symlink, create a symlink
const linkSrcPath = path.dirname(path.join(dest, file.link))
const linkDestPath = path.dirname(destFilename)
const relativePath = path.relative(linkDestPath, linkSrcPath)
// try to delete output file, because we can't overwrite a link
try {
fs.unlinkSync(destFilename)
} catch {}
const linkTo = path.join(relativePath, path.basename(file.link))
fs.symlinkSync(linkTo, destFilename)
} else {
// it's a file, try to extract it
try {
const content = disk.readFileSync(filesystem, filename, file)
fs.writeFileSync(destFilename, content)
if (file.executable) {
fs.chmodSync(destFilename, '755')
}
} catch (e) {
extractionErrors.push(e)
}
}
}
if (extractionErrors.length) {
throw new Error(
'Unable to extract some files:\n\n' +
extractionErrors.map(error => error.stack).join('\n\n'))
}
}
module.exports.uncache = function (archive) {
return disk.uncacheFilesystem(archive)
}
module.exports.uncacheAll = function () {
disk.uncacheAll()
}

@ -1,41 +0,0 @@
'use strict'
const { promisify } = require('util')
const fs = require('./wrapped-fs')
const glob = promisify(require('glob'))
async function determineFileType (filename) {
const stat = await fs.lstat(filename)
if (stat.isFile()) {
return { type: 'file', stat }
} else if (stat.isDirectory()) {
return { type: 'directory', stat }
} else if (stat.isSymbolicLink()) {
return { type: 'link', stat }
}
}
module.exports = async function (dir, options) {
const metadata = {}
const crawled = await glob(dir, options)
const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)]))
const links = []
const filenames = results.map(([filename, type]) => {
if (type) {
metadata[filename] = type
if (type.type === 'link') links.push(filename)
}
return filename
}).filter((filename) => {
// Newer glob can return files inside symlinked directories; to avoid
// those appearing in archives we need to manually exclude them here
const exactLinkIndex = links.findIndex(link => filename === link)
return links.every((link, index) => {
if (index === exactLinkIndex) return true
return !filename.startsWith(link)
})
})
return [filenames, metadata]
}
module.exports.determineFileType = determineFileType

@ -1,123 +0,0 @@
'use strict'
const fs = require('./wrapped-fs')
const path = require('path')
const pickle = require('./pickle')
const Filesystem = require('./filesystem')
let filesystemCache = {}
async function copyFile (dest, src, filename) {
const srcFile = path.join(src, filename)
const targetFile = path.join(dest, filename)
const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))])
return fs.writeFile(targetFile, content, { mode: stats.mode })
}
async function streamTransformedFile (originalFilename, outStream, transformed) {
return new Promise((resolve, reject) => {
const stream = fs.createReadStream(transformed ? transformed.path : originalFilename)
stream.pipe(outStream, { end: false })
stream.on('error', reject)
stream.on('end', () => resolve())
})
}
const writeFileListToStream = async function (dest, filesystem, out, list, metadata) {
for (const file of list) {
if (file.unpack) { // the file should not be packed into archive
const filename = path.relative(filesystem.src, file.filename)
await copyFile(`${dest}.unpacked`, filesystem.src, filename)
} else {
await streamTransformedFile(file.filename, out, metadata[file.filename].transformed)
}
}
return out.end()
}
module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(filesystem.header))
const headerBuf = headerPickle.toBuffer()
const sizePickle = pickle.createEmpty()
sizePickle.writeUInt32(headerBuf.length)
const sizeBuf = sizePickle.toBuffer()
const out = fs.createWriteStream(dest)
await new Promise((resolve, reject) => {
out.on('error', reject)
out.write(sizeBuf)
return out.write(headerBuf, () => resolve())
})
return writeFileListToStream(dest, filesystem, out, files, metadata)
}
module.exports.readArchiveHeaderSync = function (archive) {
const fd = fs.openSync(archive, 'r')
let size
let headerBuf
try {
const sizeBuf = Buffer.alloc(8)
if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
throw new Error('Unable to read header size')
}
const sizePickle = pickle.createFromBuffer(sizeBuf)
size = sizePickle.createIterator().readUInt32()
headerBuf = Buffer.alloc(size)
if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
throw new Error('Unable to read header')
}
} finally {
fs.closeSync(fd)
}
const headerPickle = pickle.createFromBuffer(headerBuf)
const header = headerPickle.createIterator().readString()
return { headerString: header, header: JSON.parse(header), headerSize: size }
}
module.exports.readFilesystemSync = function (archive) {
if (!filesystemCache[archive]) {
const header = this.readArchiveHeaderSync(archive)
const filesystem = new Filesystem(archive)
filesystem.header = header.header
filesystem.headerSize = header.headerSize
filesystemCache[archive] = filesystem
}
return filesystemCache[archive]
}
module.exports.uncacheFilesystem = function (archive) {
if (filesystemCache[archive]) {
filesystemCache[archive] = undefined
return true
}
return false
}
module.exports.uncacheAll = function () {
filesystemCache = {}
}
module.exports.readFileSync = function (filesystem, filename, info) {
let buffer = Buffer.alloc(info.size)
if (info.size <= 0) { return buffer }
if (info.unpacked) {
// it's an unpacked file, copy it.
buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
} else {
// Node throws an exception when reading 0 bytes into a 0-size buffer,
// so we short-circuit the read in this case.
const fd = fs.openSync(filesystem.src, 'r')
try {
const offset = 8 + filesystem.headerSize + parseInt(info.offset)
fs.readSync(fd, buffer, 0, info.size, offset)
} finally {
fs.closeSync(fd)
}
}
return buffer
}

@ -1,154 +0,0 @@
'use strict'
const fs = require('./wrapped-fs')
const os = require('os')
const path = require('path')
const { promisify } = require('util')
const stream = require('stream')
const getFileIntegrity = require('./integrity')
const UINT32_MAX = 2 ** 32 - 1
const pipeline = promisify(stream.pipeline)
class Filesystem {
constructor (src) {
this.src = path.resolve(src)
this.header = { files: Object.create(null) }
this.offset = BigInt(0)
}
searchNodeFromDirectory (p) {
let json = this.header
const dirs = p.split(path.sep)
for (const dir of dirs) {
if (dir !== '.') {
if (!json.files[dir]) {
json.files[dir] = { files: Object.create(null) }
}
json = json.files[dir]
}
}
return json
}
searchNodeFromPath (p) {
p = path.relative(this.src, p)
if (!p) { return this.header }
const name = path.basename(p)
const node = this.searchNodeFromDirectory(path.dirname(p))
if (node.files == null) {
node.files = Object.create(null)
}
if (node.files[name] == null) {
node.files[name] = Object.create(null)
}
return node.files[name]
}
insertDirectory (p, shouldUnpack) {
const node = this.searchNodeFromPath(p)
if (shouldUnpack) {
node.unpacked = shouldUnpack
}
node.files = node.files || Object.create(null)
return node.files
}
async insertFile (p, shouldUnpack, file, options) {
const dirNode = this.searchNodeFromPath(path.dirname(p))
const node = this.searchNodeFromPath(p)
if (shouldUnpack || dirNode.unpacked) {
node.size = file.stat.size
node.unpacked = true
node.integrity = await getFileIntegrity(p)
return Promise.resolve()
}
let size
const transformed = options.transform && options.transform(p)
if (transformed) {
const tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'asar-'))
const tmpfile = path.join(tmpdir, path.basename(p))
const out = fs.createWriteStream(tmpfile)
const readStream = fs.createReadStream(p)
await pipeline(readStream, transformed, out)
file.transformed = {
path: tmpfile,
stat: await fs.lstat(tmpfile)
}
size = file.transformed.stat.size
} else {
size = file.stat.size
}
// JavaScript cannot precisely represent integers >= UINT32_MAX.
if (size > UINT32_MAX) {
throw new Error(`${p}: file size can not be larger than 4.2GB`)
}
node.size = size
node.offset = this.offset.toString()
node.integrity = await getFileIntegrity(p)
if (process.platform !== 'win32' && (file.stat.mode & 0o100)) {
node.executable = true
}
this.offset += BigInt(size)
}
insertLink (p) {
const link = path.relative(fs.realpathSync(this.src), fs.realpathSync(p))
if (link.substr(0, 2) === '..') {
throw new Error(`${p}: file "${link}" links out of the package`)
}
const node = this.searchNodeFromPath(p)
node.link = link
return link
}
listFiles (options) {
const files = []
const fillFilesFromMetadata = function (basePath, metadata) {
if (!metadata.files) {
return
}
for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
const fullPath = path.join(basePath, childPath)
const packState = childMetadata.unpacked ? 'unpack' : 'pack '
files.push((options && options.isPack) ? `${packState} : ${fullPath}` : fullPath)
fillFilesFromMetadata(fullPath, childMetadata)
}
}
fillFilesFromMetadata('/', this.header)
return files
}
getNode (p) {
const node = this.searchNodeFromDirectory(path.dirname(p))
const name = path.basename(p)
if (name) {
return node.files[name]
} else {
return node
}
}
getFile (p, followLinks) {
followLinks = typeof followLinks === 'undefined' ? true : followLinks
const info = this.getNode(p)
// if followLinks is false we don't resolve symlinks
if (info.link && followLinks) {
return this.getFile(info.link)
} else {
return info
}
}
}
module.exports = Filesystem

@ -1,250 +0,0 @@
import { Stats } from "fs";
interface IMinimatchOptions {
/**
* Dump a ton of stuff to stderr.
*
* @default false
*/
debug?: boolean | undefined;
/**
* Do not expand `{a,b}` and `{1..3}` brace sets.
*
* @default false
*/
nobrace?: boolean | undefined;
/**
* Disable `**` matching against multiple folder names.
*
* @default false
*/
noglobstar?: boolean | undefined;
/**
* Allow patterns to match filenames starting with a period,
* even if the pattern does not explicitly have a period in that spot.
*
* Note that by default, `'a/**' + '/b'` will **not** match `a/.d/b`, unless `dot` is set.
*
* @default false
*/
dot?: boolean | undefined;
/**
* Disable "extglob" style patterns like `+(a|b)`.
*
* @default false
*/
noext?: boolean | undefined;
/**
* Perform a case-insensitive match.
*
* @default false
*/
nocase?: boolean | undefined;
/**
* When a match is not found by `minimatch.match`,
* return a list containing the pattern itself if this option is set.
* Otherwise, an empty list is returned if there are no matches.
*
* @default false
*/
nonull?: boolean | undefined;
/**
* If set, then patterns without slashes will be matched
* against the basename of the path if it contains slashes. For example,
* `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
*
* @default false
*/
matchBase?: boolean | undefined;
/**
* Suppress the behavior of treating `#` at the start of a pattern as a comment.
*
* @default false
*/
nocomment?: boolean | undefined;
/**
* Suppress the behavior of treating a leading `!` character as negation.
*
* @default false
*/
nonegate?: boolean | undefined;
/**
* Returns from negate expressions the same as if they were not negated.
* (Ie, true on a hit, false on a miss.)
*
* @default false
*/
flipNegate?: boolean | undefined;
/**
* Compare a partial path to a pattern. As long as the parts of the path that
* are present are not contradicted by the pattern, it will be treated as a
* match. This is useful in applications where you're walking through a
* folder structure, and don't yet have the full path, but want to ensure that
* you do not walk down paths that can never be a match.
*
* @default false
*
* @example
* import minimatch = require("minimatch");
*
* minimatch('/a/b', '/a/*' + '/c/d', { partial: true }) // true, might be /a/b/c/d
* minimatch('/a/b', '/**' + '/d', { partial: true }) // true, might be /a/b/.../d
* minimatch('/x/y/z', '/a/**' + '/z', { partial: true }) // false, because x !== a
*/
partial?: boolean;
/**
* Use `\\` as a path separator _only_, and _never_ as an escape
* character. If set, all `\\` characters are replaced with `/` in
* the pattern. Note that this makes it **impossible** to match
* against paths containing literal glob pattern characters, but
* allows matching with patterns constructed using `path.join()` and
* `path.resolve()` on Windows platforms, mimicking the (buggy!)
* behavior of earlier versions on Windows. Please use with
* caution, and be mindful of the caveat about Windows paths
*
* For legacy reasons, this is also set if
* `options.allowWindowsEscape` is set to the exact value `false`.
*
* @default false
*/
windowsPathsNoEscape?: boolean;
}
import fs = require("fs");
interface IGlobOptions extends IMinimatchOptions {
cwd?: string | undefined;
root?: string | undefined;
dot?: boolean | undefined;
nomount?: boolean | undefined;
mark?: boolean | undefined;
nosort?: boolean | undefined;
stat?: boolean | undefined;
silent?: boolean | undefined;
strict?: boolean | undefined;
cache?:
| { [path: string]: boolean | "DIR" | "FILE" | ReadonlyArray<string> }
| undefined;
statCache?:
| { [path: string]: false | { isDirectory(): boolean } | undefined }
| undefined;
symlinks?: { [path: string]: boolean | undefined } | undefined;
realpathCache?: { [path: string]: string } | undefined;
sync?: boolean | undefined;
nounique?: boolean | undefined;
nonull?: boolean | undefined;
debug?: boolean | undefined;
nobrace?: boolean | undefined;
noglobstar?: boolean | undefined;
noext?: boolean | undefined;
nocase?: boolean | undefined;
matchBase?: any;
nodir?: boolean | undefined;
ignore?: string | ReadonlyArray<string> | undefined;
follow?: boolean | undefined;
realpath?: boolean | undefined;
nonegate?: boolean | undefined;
nocomment?: boolean | undefined;
absolute?: boolean | undefined;
allowWindowsEscape?: boolean | undefined;
fs?: typeof fs;
}
export type CreateOptions = {
dot?: boolean;
globOptions?: IGlobOptions;
ordering?: string;
pattern?: string;
transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
unpack?: string;
unpackDir?: string;
};
export type ListOptions = {
isPack: boolean;
};
export type EntryMetadata = {
unpacked: boolean;
};
export type DirectoryMetadata = EntryMetadata & {
files: { [property: string]: EntryMetadata };
};
export type FileMetadata = EntryMetadata & {
executable?: true;
offset?: number;
size?: number;
};
export type LinkMetadata = {
link: string;
};
export type Metadata = DirectoryMetadata | FileMetadata | LinkMetadata;
export type InputMetadataType = 'directory' | 'file' | 'link';
export type InputMetadata = {
[property: string]: {
type: InputMetadataType;
stat: Stats;
}
};
export type DirectoryRecord = {
files: Record<string, DirectoryRecord | FileRecord>;
};
export type FileRecord = {
offset: string;
size: number;
executable?: boolean;
integrity: {
hash: string;
algorithm: 'SHA256';
blocks: string[];
blockSize: number;
};
}
export type ArchiveHeader = {
// The JSON parsed header string
header: DirectoryRecord;
headerString: string;
headerSize: number;
}
export function createPackage(src: string, dest: string): Promise<void>;
export function createPackageWithOptions(
src: string,
dest: string,
options: CreateOptions
): Promise<void>;
export function createPackageFromFiles(
src: string,
dest: string,
filenames: string[],
metadata?: InputMetadata,
options?: CreateOptions
): Promise<void>;
export function statFile(archive: string, filename: string, followLinks?: boolean): Metadata;
export function getRawHeader(archive: string): ArchiveHeader;
export function listPackage(archive: string, options?: ListOptions): string[];
export function extractFile(archive: string, filename: string): Buffer;
export function extractAll(archive: string, dest: string): void;
export function uncache(archive: string): boolean;
export function uncacheAll(): void;

@ -1,62 +0,0 @@
const crypto = require('crypto')
const fs = require('fs')
const stream = require('stream')
const { promisify } = require('util')
const ALGORITHM = 'SHA256'
// 4MB default block size
const BLOCK_SIZE = 4 * 1024 * 1024
const pipeline = promisify(stream.pipeline)
function hashBlock (block) {
return crypto.createHash(ALGORITHM).update(block).digest('hex')
}
async function getFileIntegrity (path) {
const fileHash = crypto.createHash(ALGORITHM)
const blocks = []
let currentBlockSize = 0
let currentBlock = []
await pipeline(
fs.createReadStream(path),
new stream.PassThrough({
decodeStrings: false,
transform (_chunk, encoding, callback) {
fileHash.update(_chunk)
function handleChunk (chunk) {
const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength)
currentBlockSize += diffToSlice
currentBlock.push(chunk.slice(0, diffToSlice))
if (currentBlockSize === BLOCK_SIZE) {
blocks.push(hashBlock(Buffer.concat(currentBlock)))
currentBlock = []
currentBlockSize = 0
}
if (diffToSlice < chunk.byteLength) {
handleChunk(chunk.slice(diffToSlice))
}
}
handleChunk(_chunk)
callback()
},
flush (callback) {
blocks.push(hashBlock(Buffer.concat(currentBlock)))
currentBlock = []
callback()
}
})
)
return {
algorithm: ALGORITHM,
hash: fileHash.digest('hex'),
blockSize: BLOCK_SIZE,
blocks: blocks
}
}
module.exports = getFileIntegrity

@ -1,230 +0,0 @@
// sizeof(T).
const SIZE_INT32 = 4
const SIZE_UINT32 = 4
const SIZE_INT64 = 8
const SIZE_UINT64 = 8
const SIZE_FLOAT = 4
const SIZE_DOUBLE = 8
// The allocation granularity of the payload.
const PAYLOAD_UNIT = 64
// Largest JS number.
const CAPACITY_READ_ONLY = 9007199254740992
// Aligns 'i' by rounding it up to the next multiple of 'alignment'.
const alignInt = function (i, alignment) {
return i + (alignment - (i % alignment)) % alignment
}
// PickleIterator reads data from a Pickle. The Pickle object must remain valid
// while the PickleIterator object is in use.
const PickleIterator = (function () {
function PickleIterator (pickle) {
this.payload = pickle.header
this.payloadOffset = pickle.headerSize
this.readIndex = 0
this.endIndex = pickle.getPayloadSize()
}
PickleIterator.prototype.readBool = function () {
return this.readInt() !== 0
}
PickleIterator.prototype.readInt = function () {
return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE)
}
PickleIterator.prototype.readUInt32 = function () {
return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE)
}
PickleIterator.prototype.readInt64 = function () {
return this.readBytes(SIZE_INT64, Buffer.prototype.readInt64LE)
}
PickleIterator.prototype.readUInt64 = function () {
return this.readBytes(SIZE_UINT64, Buffer.prototype.readUInt64LE)
}
PickleIterator.prototype.readFloat = function () {
return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE)
}
PickleIterator.prototype.readDouble = function () {
return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE)
}
PickleIterator.prototype.readString = function () {
return this.readBytes(this.readInt()).toString()
}
PickleIterator.prototype.readBytes = function (length, method) {
const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length)
if (method != null) {
return method.call(this.payload, readPayloadOffset, length)
} else {
return this.payload.slice(readPayloadOffset, readPayloadOffset + length)
}
}
PickleIterator.prototype.getReadPayloadOffsetAndAdvance = function (length) {
if (length > this.endIndex - this.readIndex) {
this.readIndex = this.endIndex
throw new Error('Failed to read data with length of ' + length)
}
const readPayloadOffset = this.payloadOffset + this.readIndex
this.advance(length)
return readPayloadOffset
}
PickleIterator.prototype.advance = function (size) {
const alignedSize = alignInt(size, SIZE_UINT32)
if (this.endIndex - this.readIndex < alignedSize) {
this.readIndex = this.endIndex
} else {
this.readIndex += alignedSize
}
}
return PickleIterator
})()
// This class provides facilities for basic binary value packing and unpacking.
//
// The Pickle class supports appending primitive values (ints, strings, etc.)
// to a pickle instance. The Pickle instance grows its internal memory buffer
// dynamically to hold the sequence of primitive values. The internal memory
// buffer is exposed as the "data" of the Pickle. This "data" can be passed
// to a Pickle object to initialize it for reading.
//
// When reading from a Pickle object, it is important for the consumer to know
// what value types to read and in what order to read them as the Pickle does
// not keep track of the type of data written to it.
//
// The Pickle's data has a header which contains the size of the Pickle's
// payload. It can optionally support additional space in the header. That
// space is controlled by the header_size parameter passed to the Pickle
// constructor.
const Pickle = (function () {
function Pickle (buffer) {
if (buffer) {
this.initFromBuffer(buffer)
} else {
this.initEmpty()
}
}
Pickle.prototype.initEmpty = function () {
this.header = Buffer.alloc(0)
this.headerSize = SIZE_UINT32
this.capacityAfterHeader = 0
this.writeOffset = 0
this.resize(PAYLOAD_UNIT)
this.setPayloadSize(0)
}
Pickle.prototype.initFromBuffer = function (buffer) {
this.header = buffer
this.headerSize = buffer.length - this.getPayloadSize()
this.capacityAfterHeader = CAPACITY_READ_ONLY
this.writeOffset = 0
if (this.headerSize > buffer.length) {
this.headerSize = 0
}
if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) {
this.headerSize = 0
}
if (this.headerSize === 0) {
this.header = Buffer.alloc(0)
}
}
Pickle.prototype.createIterator = function () {
return new PickleIterator(this)
}
Pickle.prototype.toBuffer = function () {
return this.header.slice(0, this.headerSize + this.getPayloadSize())
}
Pickle.prototype.writeBool = function (value) {
return this.writeInt(value ? 1 : 0)
}
Pickle.prototype.writeInt = function (value) {
return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE)
}
Pickle.prototype.writeUInt32 = function (value) {
return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE)
}
Pickle.prototype.writeInt64 = function (value) {
return this.writeBytes(value, SIZE_INT64, Buffer.prototype.writeInt64LE)
}
Pickle.prototype.writeUInt64 = function (value) {
return this.writeBytes(value, SIZE_UINT64, Buffer.prototype.writeUInt64LE)
}
Pickle.prototype.writeFloat = function (value) {
return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE)
}
Pickle.prototype.writeDouble = function (value) {
return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE)
}
Pickle.prototype.writeString = function (value) {
const length = Buffer.byteLength(value, 'utf8')
if (!this.writeInt(length)) {
return false
}
return this.writeBytes(value, length)
}
Pickle.prototype.setPayloadSize = function (payloadSize) {
return this.header.writeUInt32LE(payloadSize, 0)
}
Pickle.prototype.getPayloadSize = function () {
return this.header.readUInt32LE(0)
}
Pickle.prototype.writeBytes = function (data, length, method) {
const dataLength = alignInt(length, SIZE_UINT32)
const newSize = this.writeOffset + dataLength
if (newSize > this.capacityAfterHeader) {
this.resize(Math.max(this.capacityAfterHeader * 2, newSize))
}
if (method != null) {
method.call(this.header, data, this.headerSize + this.writeOffset)
} else {
this.header.write(data, this.headerSize + this.writeOffset, length)
}
const endOffset = this.headerSize + this.writeOffset + length
this.header.fill(0, endOffset, endOffset + dataLength - length)
this.setPayloadSize(newSize)
this.writeOffset = newSize
return true
}
Pickle.prototype.resize = function (newCapacity) {
newCapacity = alignInt(newCapacity, PAYLOAD_UNIT)
this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)])
this.capacityAfterHeader = newCapacity
}
return Pickle
})()
module.exports = {
createEmpty: function () {
return new Pickle()
},
createFromBuffer: function (buffer) {
return new Pickle(buffer)
}
}

@ -1,26 +0,0 @@
'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const promisifiedMethods = [
'lstat',
'mkdtemp',
'readFile',
'stat',
'writeFile'
]
const promisified = {}
for (const method of Object.keys(fs)) {
if (promisifiedMethods.includes(method)) {
promisified[method] = fs.promises[method]
} else {
promisified[method] = fs[method]
}
}
// To make it more like fs-extra
promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true })
promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true })
module.exports = promisified

@ -1,64 +0,0 @@
{
"name": "@electron/asar",
"description": "Creating Electron app packages",
"version": "3.2.7",
"main": "./lib/asar.js",
"types": "./lib/index.d.ts",
"bin": {
"asar": "./bin/asar.js"
},
"files": [
"bin",
"lib",
"lib/index.d.ts"
],
"engines": {
"node": ">=10.12.0"
},
"license": "MIT",
"homepage": "https://github.com/electron/asar",
"repository": {
"type": "git",
"url": "https://github.com/electron/asar.git"
},
"bugs": {
"url": "https://github.com/electron/asar/issues"
},
"scripts": {
"mocha": "xvfb-maybe electron-mocha --reporter spec && mocha --reporter spec",
"test": "npm run lint && npm run mocha",
"lint": "tsd && standard",
"standard": "standard",
"tsd": "tsd"
},
"standard": {
"env": {
"mocha": true
},
"globals": [
"BigInt"
]
},
"tsd": {
"directory": "test"
},
"dependencies": {
"commander": "^5.0.0",
"glob": "^7.1.6",
"minimatch": "^3.0.4"
},
"devDependencies": {
"@continuous-auth/semantic-release-npm": "^3.0.0",
"electron": "^22.0.0",
"electron-mocha": "^11.0.2",
"lodash": "^4.17.15",
"mocha": "^10.1.0",
"rimraf": "^3.0.2",
"standard": "^14.3.3",
"tsd": "^0.25.0",
"xvfb-maybe": "^0.2.1"
},
"__npminstall_done": true,
"_from": "@electron/asar@3.2.7",
"_resolved": "https://registry.npmmirror.com/@electron/asar/-/asar-3.2.7.tgz"
}

@ -1 +0,0 @@
../../commander@5.1.0/node_modules/commander

@ -1 +0,0 @@
../../glob@7.2.3/node_modules/glob

@ -1 +0,0 @@
../../minimatch@3.1.2/node_modules/minimatch

@ -1,21 +0,0 @@
MIT License
Copyright (c) Contributors to the Electron project
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -1,138 +0,0 @@
# @electron/get
> Download Electron release artifacts
[![CircleCI](https://circleci.com/gh/electron/get.svg?style=shield)](https://circleci.com/gh/electron/get)
[![NPM package](https://img.shields.io/npm/v/@electron/get)](https://npm.im/@electron/get)
## Usage
### Simple: Downloading an Electron Binary ZIP
```typescript
import { download } from '@electron/get';
// NB: Use this syntax within an async function, Node does not have support for
// top-level await as of Node 12.
const zipFilePath = await download('4.0.4');
```
### Advanced: Downloading a macOS Electron Symbol File
```typescript
import { downloadArtifact } from '@electron/get';
// NB: Use this syntax within an async function, Node does not have support for
// top-level await as of Node 12.
const zipFilePath = await downloadArtifact({
version: '4.0.4',
platform: 'darwin',
artifactName: 'electron',
artifactSuffix: 'symbols',
arch: 'x64',
});
```
### Specifying a mirror
To specify another location to download Electron assets from, the following options are
available:
* `mirrorOptions` Object
* `mirror` String (optional) - The base URL of the mirror to download from.
* `nightlyMirror` String (optional) - The Electron nightly-specific mirror URL.
* `customDir` String (optional) - The name of the directory to download from, often scoped by version number.
* `customFilename` String (optional) - The name of the asset to download.
* `resolveAssetURL` Function (optional) - A function allowing customization of the url used to download the asset.
Anatomy of a download URL, in terms of `mirrorOptions`, using
`https://github.com/electron/electron/releases/download/v4.0.4/electron-v4.0.4-linux-x64.zip`
as an example:
* `mirror` / `nightlyMirror` - `https://github.com/electron/electron/releases/download/`
* `customDir` - `v4.0.4`
* `customFilename` - `electron-v4.0.4-linux-x64.zip`
Example:
```typescript
import { download } from '@electron/get';
const zipFilePath = await download('4.0.4', {
mirrorOptions: {
mirror: 'https://mirror.example.com/electron/',
customDir: 'custom',
customFilename: 'unofficial-electron-linux.zip'
}
});
// Will download from https://mirror.example.com/electron/custom/unofficial-electron-linux.zip
const nightlyZipFilePath = await download('8.0.0-nightly.20190901', {
mirrorOptions: {
nightlyMirror: 'https://nightly.example.com/',
customDir: 'nightlies',
customFilename: 'nightly-linux.zip'
}
});
// Will download from https://nightly.example.com/nightlies/nightly-linux.zip
```
`customDir` can have the placeholder `{{ version }}`, which will be replaced by the version
specified (without the leading `v`). For example:
```javascript
const zipFilePath = await download('4.0.4', {
  platform: 'linux',
  arch: 'x64',
  mirrorOptions: {
    mirror: 'https://mirror.example.com/electron/',
    customDir: 'version-{{ version }}'
  }
});
// Will download from https://mirror.example.com/electron/version-4.0.4/electron-v4.0.4-linux-x64.zip
```
#### Using environment variables for mirror options
Mirror options can also be specified via the following environment variables:
* `ELECTRON_CUSTOM_DIR` - Specifies the custom directory to download from.
* `ELECTRON_CUSTOM_FILENAME` - Specifies the custom file name to download.
* `ELECTRON_MIRROR` - Specifies the URL of the server to download from if the version is not a nightly version.
* `ELECTRON_NIGHTLY_MIRROR` - Specifies the URL of the server to download from if the version is a nightly version.
### Overriding the version downloaded
The version downloaded can be overridden by setting the `ELECTRON_CUSTOM_VERSION` environment variable.
Setting this environment variable will override the version passed in to `download` or `downloadArtifact`.
## How It Works
This module downloads Electron to a known place on your system and caches it
so that future requests for that asset can be returned instantly. The cache
locations are:
* Linux: `$XDG_CACHE_HOME` or `~/.cache/electron/`
* MacOS: `~/Library/Caches/electron/`
* Windows: `%LOCALAPPDATA%/electron/Cache` or `~/AppData/Local/electron/Cache/`
By default, the module uses [`got`](https://github.com/sindresorhus/got) as the
downloader. As a result, you can use the same [options](https://github.com/sindresorhus/got#options)
via `downloadOptions`.
### Progress Bar
By default, a progress bar is shown when downloading an artifact for more than 30 seconds. To
disable, set the `ELECTRON_GET_NO_PROGRESS` environment variable to any non-empty value, or set
`quiet` to `true` in `downloadOptions`. If you need to monitor progress yourself via the API, set
`getProgressCallback` in `downloadOptions`, which has the same function signature as `got`'s
[`downloadProgress` event callback](https://github.com/sindresorhus/got#ondownloadprogress-progress).
### Proxies
Downstream packages should utilize the `initializeProxy` function to add HTTP(S) proxy support. If
the environment variable `ELECTRON_GET_USE_PROXY` is set, it is called automatically.
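A short sketch tying the download options and proxy support together (the Electron version and the `timeout` value are illustrative; anything under `downloadOptions` is simply forwarded to `got`):

```javascript
const { download, initializeProxy } = require('@electron/get');

// Enable HTTP(S) proxy support explicitly; this also happens automatically
// when the ELECTRON_GET_USE_PROXY environment variable is set.
initializeProxy();

(async () => {
  const zipFilePath = await download('27.0.0', {
    downloadOptions: {
      timeout: 120000 // illustrative got option, forwarded as-is
    }
  });
  console.log('Downloaded to', zipFilePath);
})();
```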

@ -1,8 +0,0 @@
export declare class Cache {
private cacheRoot;
constructor(cacheRoot?: string);
static getCacheDirectory(downloadUrl: string): string;
getCachePath(downloadUrl: string, fileName: string): string;
getPathForFileInCache(url: string, fileName: string): Promise<string | null>;
putFileInCache(url: string, currentPath: string, fileName: string): Promise<string>;
}

@ -1,60 +0,0 @@
"use strict";
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
Object.defineProperty(exports, "__esModule", { value: true });
const debug_1 = require("debug");
const env_paths_1 = require("env-paths");
const fs = require("fs-extra");
const path = require("path");
const url = require("url");
const crypto = require("crypto");
const d = debug_1.default('@electron/get:cache');
const defaultCacheRoot = env_paths_1.default('electron', {
suffix: '',
}).cache;
class Cache {
constructor(cacheRoot = defaultCacheRoot) {
this.cacheRoot = cacheRoot;
}
static getCacheDirectory(downloadUrl) {
const parsedDownloadUrl = url.parse(downloadUrl);
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { search, hash, pathname } = parsedDownloadUrl, rest = __rest(parsedDownloadUrl, ["search", "hash", "pathname"]);
const strippedUrl = url.format(Object.assign(Object.assign({}, rest), { pathname: path.dirname(pathname || 'electron') }));
return crypto
.createHash('sha256')
.update(strippedUrl)
.digest('hex');
}
getCachePath(downloadUrl, fileName) {
return path.resolve(this.cacheRoot, Cache.getCacheDirectory(downloadUrl), fileName);
}
async getPathForFileInCache(url, fileName) {
const cachePath = this.getCachePath(url, fileName);
if (await fs.pathExists(cachePath)) {
return cachePath;
}
return null;
}
async putFileInCache(url, currentPath, fileName) {
const cachePath = this.getCachePath(url, fileName);
d(`Moving ${currentPath} to ${cachePath}`);
if (await fs.pathExists(cachePath)) {
d('* Replacing existing file');
await fs.remove(cachePath);
}
await fs.move(currentPath, cachePath);
return cachePath;
}
}
exports.Cache = Cache;
//# sourceMappingURL=Cache.js.map

@ -1 +0,0 @@
{"version":3,"file":"Cache.js","sourceRoot":"","sources":["../../src/Cache.ts"],"names":[],"mappings":";;;;;;;;;;;;;AAAA,iCAA0B;AAC1B,yCAAiC;AACjC,+BAA+B;AAC/B,6BAA6B;AAC7B,2BAA2B;AAC3B,iCAAiC;AAEjC,MAAM,CAAC,GAAG,eAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC,MAAM,gBAAgB,GAAG,mBAAQ,CAAC,UAAU,EAAE;IAC5C,MAAM,EAAE,EAAE;CACX,CAAC,CAAC,KAAK,CAAC;AAET,MAAa,KAAK;IAChB,YAAoB,YAAY,gBAAgB;QAA5B,cAAS,GAAT,SAAS,CAAmB;IAAG,CAAC;IAE7C,MAAM,CAAC,iBAAiB,CAAC,WAAmB;QACjD,MAAM,iBAAiB,GAAG,GAAG,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QACjD,6DAA6D;QAC7D,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,KAAc,iBAAiB,EAA7B,gEAA6B,CAAC;QAC9D,MAAM,WAAW,GAAG,GAAG,CAAC,MAAM,iCAAM,IAAI,KAAE,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,UAAU,CAAC,IAAG,CAAC;QAE5F,OAAO,MAAM;aACV,UAAU,CAAC,QAAQ,CAAC;aACpB,MAAM,CAAC,WAAW,CAAC;aACnB,MAAM,CAAC,KAAK,CAAC,CAAC;IACnB,CAAC;IAEM,YAAY,CAAC,WAAmB,EAAE,QAAgB;QACvD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,iBAAiB,CAAC,WAAW,CAAC,EAAE,QAAQ,CAAC,CAAC;IACtF,CAAC;IAEM,KAAK,CAAC,qBAAqB,CAAC,GAAW,EAAE,QAAgB;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAClC,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,KAAK,CAAC,cAAc,CAAC,GAAW,EAAE,WAAmB,EAAE,QAAgB;QAC5E,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACnD,CAAC,CAAC,UAAU,WAAW,OAAO,SAAS,EAAE,CAAC,CAAC;QAC3C,IAAI,MAAM,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAClC,CAAC,CAAC,2BAA2B,CAAC,CAAC;YAC/B,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SAC5B;QAED,MAAM,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;QAEtC,OAAO,SAAS,CAAC;IACnB,CAAC;CACF;AAxCD,sBAwCC"}

@ -1,3 +0,0 @@
export interface Downloader<T> {
download(url: string, targetFilePath: string, options: T): Promise<void>;
}

@ -1,3 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=Downloader.js.map

@ -1 +0,0 @@
{"version":3,"file":"Downloader.js","sourceRoot":"","sources":["../../src/Downloader.ts"],"names":[],"mappings":""}

@ -1,21 +0,0 @@
import { Progress as GotProgress, Options as GotOptions } from 'got';
import { Downloader } from './Downloader';
/**
* See [`got#options`](https://github.com/sindresorhus/got#options) for possible keys/values.
*/
export declare type GotDownloaderOptions = (GotOptions & {
isStream?: true;
}) & {
/**
* if defined, triggers every time `got`'s `downloadProgress` event callback is triggered.
*/
getProgressCallback?: (progress: GotProgress) => Promise<void>;
/**
* if `true`, disables the console progress bar (setting the `ELECTRON_GET_NO_PROGRESS`
* environment variable to a non-empty value also does this).
*/
quiet?: boolean;
};
export declare class GotDownloader implements Downloader<GotDownloaderOptions> {
download(url: string, targetFilePath: string, options?: GotDownloaderOptions): Promise<void>;
}

@ -1,76 +0,0 @@
"use strict";
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs-extra");
const got_1 = require("got");
const path = require("path");
const ProgressBar = require("progress");
const PROGRESS_BAR_DELAY_IN_SECONDS = 30;
class GotDownloader {
async download(url, targetFilePath, options) {
if (!options) {
options = {};
}
const { quiet, getProgressCallback } = options, gotOptions = __rest(options, ["quiet", "getProgressCallback"]);
let downloadCompleted = false;
let bar;
let progressPercent;
let timeout = undefined;
await fs.mkdirp(path.dirname(targetFilePath));
const writeStream = fs.createWriteStream(targetFilePath);
if (!quiet || !process.env.ELECTRON_GET_NO_PROGRESS) {
const start = new Date();
timeout = setTimeout(() => {
if (!downloadCompleted) {
bar = new ProgressBar(`Downloading ${path.basename(url)}: [:bar] :percent ETA: :eta seconds `, {
curr: progressPercent,
total: 100,
});
// https://github.com/visionmedia/node-progress/issues/159
// eslint-disable-next-line @typescript-eslint/no-explicit-any
bar.start = start;
}
}, PROGRESS_BAR_DELAY_IN_SECONDS * 1000);
}
await new Promise((resolve, reject) => {
const downloadStream = got_1.default.stream(url, gotOptions);
downloadStream.on('downloadProgress', async (progress) => {
progressPercent = progress.percent;
if (bar) {
bar.update(progress.percent);
}
if (getProgressCallback) {
await getProgressCallback(progress);
}
});
downloadStream.on('error', error => {
if (error instanceof got_1.HTTPError && error.response.statusCode === 404) {
error.message += ` for ${error.response.url}`;
}
if (writeStream.destroy) {
writeStream.destroy(error);
}
reject(error);
});
writeStream.on('error', error => reject(error));
writeStream.on('close', () => resolve());
downloadStream.pipe(writeStream);
});
downloadCompleted = true;
if (timeout) {
clearTimeout(timeout);
}
}
}
exports.GotDownloader = GotDownloader;
//# sourceMappingURL=GotDownloader.js.map

@ -1 +0,0 @@
{"version":3,"file":"GotDownloader.js","sourceRoot":"","sources":["../../src/GotDownloader.ts"],"names":[],"mappings":";;;;;;;;;;;;;AAAA,+BAA+B;AAC/B,6BAAqF;AACrF,6BAA6B;AAC7B,wCAAwC;AAIxC,MAAM,6BAA6B,GAAG,EAAE,CAAC;AAiBzC,MAAa,aAAa;IACxB,KAAK,CAAC,QAAQ,CACZ,GAAW,EACX,cAAsB,EACtB,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QACD,MAAM,EAAE,KAAK,EAAE,mBAAmB,KAAoB,OAAO,EAAzB,8DAAyB,CAAC;QAC9D,IAAI,iBAAiB,GAAG,KAAK,CAAC;QAC9B,IAAI,GAA4B,CAAC;QACjC,IAAI,eAAuB,CAAC;QAC5B,IAAI,OAAO,GAA+B,SAAS,CAAC;QACpD,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC;QAC9C,MAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,CAAC,cAAc,CAAC,CAAC;QAEzD,IAAI,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,IAAI,EAAE,CAAC;YACzB,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE;gBACxB,IAAI,CAAC,iBAAiB,EAAE;oBACtB,GAAG,GAAG,IAAI,WAAW,CACnB,eAAe,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,sCAAsC,EACvE;wBACE,IAAI,EAAE,eAAe;wBACrB,KAAK,EAAE,GAAG;qBACX,CACF,CAAC;oBACF,0DAA0D;oBAC1D,8DAA8D;oBAC7D,GAAW,CAAC,KAAK,GAAG,KAAK,CAAC;iBAC5B;YACH,CAAC,EAAE,6BAA6B,GAAG,IAAI,CAAC,CAAC;SAC1C;QACD,MAAM,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC1C,MAAM,cAAc,GAAG,aAAG,CAAC,MAAM,CAAC,GAAG,EAAE,UAAU,CAAC,CAAC;YACnD,cAAc,CAAC,EAAE,CAAC,kBAAkB,EAAE,KAAK,EAAC,QAAQ,EAAC,EAAE;gBACrD,eAAe,GAAG,QAAQ,CAAC,OAAO,CAAC;gBACnC,IAAI,GAAG,EAAE;oBACP,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;iBAC9B;gBACD,IAAI,mBAAmB,EAAE;oBACvB,MAAM,mBAAmB,CAAC,QAAQ,CAAC,CAAC;iBACrC;YACH,CAAC,CAAC,CAAC;YACH,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;gBACjC,IAAI,KAAK,YAAY,eAAS,IAAI,KAAK,CAAC,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;oBACnE,KAAK,CAAC,OAAO,IAAI,QAAQ,KAAK,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;iBAC/C;gBACD,IAAI,WAAW,CAAC,OAAO,EAAE;oBACvB,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;iBAC5B;gBAED,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC,CAAC,CAAC;YACH,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;YAChD,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;YAEzC,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;QAEH,iBAAiB,GAAG,IAAI,CAAC;QACzB,IAAI,OAAO,EAAE;YACX,YAAY,CAAC,OAAO,CAAC,CAAC;SACvB;IACH,CAAC;CACF;AAlED,sCAkEC"}

@ -1,4 +0,0 @@
import { ElectronArtifactDetails } from './types';
export declare function getArtifactFileName(details: ElectronArtifactDetails): string;
export declare function getArtifactRemoteURL(details: ElectronArtifactDetails): Promise<string>;
export declare function getArtifactVersion(details: ElectronArtifactDetails): string;

@ -1,66 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const utils_1 = require("./utils");
const BASE_URL = 'https://github.com/electron/electron/releases/download/';
const NIGHTLY_BASE_URL = 'https://github.com/electron/nightlies/releases/download/';
function getArtifactFileName(details) {
utils_1.ensureIsTruthyString(details, 'artifactName');
if (details.isGeneric) {
return details.artifactName;
}
utils_1.ensureIsTruthyString(details, 'arch');
utils_1.ensureIsTruthyString(details, 'platform');
utils_1.ensureIsTruthyString(details, 'version');
return `${[
details.artifactName,
details.version,
details.platform,
details.arch,
...(details.artifactSuffix ? [details.artifactSuffix] : []),
].join('-')}.zip`;
}
exports.getArtifactFileName = getArtifactFileName;
function mirrorVar(name, options, defaultValue) {
// Convert camelCase to camel_case for env var reading
const snakeName = name.replace(/([a-z])([A-Z])/g, (_, a, b) => `${a}_${b}`).toLowerCase();
return (
// .npmrc
process.env[`npm_config_electron_${name.toLowerCase()}`] ||
process.env[`NPM_CONFIG_ELECTRON_${snakeName.toUpperCase()}`] ||
process.env[`npm_config_electron_${snakeName}`] ||
// package.json
process.env[`npm_package_config_electron_${name}`] ||
process.env[`npm_package_config_electron_${snakeName.toLowerCase()}`] ||
// env
process.env[`ELECTRON_${snakeName.toUpperCase()}`] ||
options[name] ||
defaultValue);
}
async function getArtifactRemoteURL(details) {
const opts = details.mirrorOptions || {};
let base = mirrorVar('mirror', opts, BASE_URL);
if (details.version.includes('nightly')) {
const nightlyDeprecated = mirrorVar('nightly_mirror', opts, '');
if (nightlyDeprecated) {
base = nightlyDeprecated;
console.warn(`nightly_mirror is deprecated, please use nightlyMirror`);
}
else {
base = mirrorVar('nightlyMirror', opts, NIGHTLY_BASE_URL);
}
}
const path = mirrorVar('customDir', opts, details.version).replace('{{ version }}', details.version.replace(/^v/, ''));
const file = mirrorVar('customFilename', opts, getArtifactFileName(details));
// Allow customized download URL resolution.
if (opts.resolveAssetURL) {
const url = await opts.resolveAssetURL(details);
return url;
}
return `${base}${path}/${file}`;
}
exports.getArtifactRemoteURL = getArtifactRemoteURL;
function getArtifactVersion(details) {
return utils_1.normalizeVersion(mirrorVar('customVersion', details.mirrorOptions || {}, details.version));
}
exports.getArtifactVersion = getArtifactVersion;
//# sourceMappingURL=artifact-utils.js.map

@ -1 +0,0 @@
{"version":3,"file":"artifact-utils.js","sourceRoot":"","sources":["../../src/artifact-utils.ts"],"names":[],"mappings":";;AACA,mCAAiE;AAEjE,MAAM,QAAQ,GAAG,yDAAyD,CAAC;AAC3E,MAAM,gBAAgB,GAAG,0DAA0D,CAAC;AAEpF,SAAgB,mBAAmB,CAAC,OAAgC;IAClE,4BAAoB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;IAE9C,IAAI,OAAO,CAAC,SAAS,EAAE;QACrB,OAAO,OAAO,CAAC,YAAY,CAAC;KAC7B;IAED,4BAAoB,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACtC,4BAAoB,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;IAC1C,4BAAoB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;IAEzC,OAAO,GAAG;QACR,OAAO,CAAC,YAAY;QACpB,OAAO,CAAC,OAAO;QACf,OAAO,CAAC,QAAQ;QAChB,OAAO,CAAC,IAAI;QACZ,GAAG,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;KAC5D,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC;AACpB,CAAC;AAlBD,kDAkBC;AAED,SAAS,SAAS,CAChB,IAAkD,EAClD,OAAsB,EACtB,YAAoB;IAEpB,sDAAsD;IACtD,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;IAE1F,OAAO;IACL,SAAS;IACT,OAAO,CAAC,GAAG,CAAC,uBAAuB,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;QACxD,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,EAAE,CAAC;QAC/C,eAAe;QACf,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,EAAE,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,+BAA+B,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QACrE,MAAM;QACN,OAAO,CAAC,GAAG,CAAC,YAAY,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QAClD,OAAO,CAAC,IAAI,CAAC;QACb,YAAY,CACb,CAAC;AACJ,CAAC;AAEM,KAAK,UAAU,oBAAoB,CAAC,OAAgC;IACzE,MAAM,IAAI,GAAkB,OAAO,CAAC,aAAa,IAAI,EAAE,CAAC;IACxD,IAAI,IAAI,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;IAC/C,IAAI,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACvC,MAAM,iBAAiB,GAAG,SAAS,CAAC,gBAAgB,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC;QAChE,IAAI,iBAAiB,EAAE;YACrB,IAAI,GAAG,iBAAiB,CAAC;YACzB,OAAO,CAAC,IAAI,CAAC,wDAAwD,CAAC,CAAC;SACxE;aAAM;YACL,IAAI,GAAG,SAAS,CAAC,eAAe,EAAE,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAC3D;KACF;IACD,MAAM,IAAI,GAAG,SAAS,CAAC,WAAW,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,OAAO,CAChE,eAAe,EACf,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAClC,CAAC;IACF,MAAM,IAAI,GAAG,SAAS,CAAC,gBAAgB,EAAE,IAAI,EAAE,mBAAmB,CAAC,OAAO,CAAC,CAAC,CAAC;IAE7E,4CAA4C;IAC5C,IAAI,IAAI,CAAC,eAAe,EAAE;QACxB,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,CAAC;QAChD,OAAO,GAAG,CAAC;KACZ;IAED,OAAO,GAAG,IAAI,GAAG,IAAI,IAAI,IAAI,EAAE,CAAC;AAClC,CAAC;AAzBD,oDAyBC;AAED,SAAgB,kBAAkB,CAAC,OAAgC;IACjE,OAAO,wBAAgB,CAAC,SAAS,CAAC,eAAe,EAAE,OAAO,CAAC,aAAa,IAAI,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;AACpG,CAAC;AAFD,gDAEC"}

@ -1,3 +0,0 @@
import { DownloadOptions } from './types';
import { Downloader } from './Downloader';
export declare function getDownloaderForSystem(): Promise<Downloader<DownloadOptions>>;

@ -1,12 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
async function getDownloaderForSystem() {
// TODO: Resolve the downloader or default to GotDownloader
// Current thoughts are a dot-file traversal for something like
// ".electron.downloader" which would be a text file with the name of the
// npm module to import() and use as the downloader
const { GotDownloader } = await Promise.resolve().then(() => require('./GotDownloader'));
return new GotDownloader();
}
exports.getDownloaderForSystem = getDownloaderForSystem;
//# sourceMappingURL=downloader-resolver.js.map

@ -1 +0,0 @@
{"version":3,"file":"downloader-resolver.js","sourceRoot":"","sources":["../../src/downloader-resolver.ts"],"names":[],"mappings":";;AAGO,KAAK,UAAU,sBAAsB;IAC1C,2DAA2D;IAC3D,+DAA+D;IAC/D,yEAAyE;IACzE,mDAAmD;IACnD,MAAM,EAAE,aAAa,EAAE,GAAG,2CAAa,iBAAiB,EAAC,CAAC;IAC1D,OAAO,IAAI,aAAa,EAAE,CAAC;AAC7B,CAAC;AAPD,wDAOC"}

@ -1,18 +0,0 @@
import { ElectronDownloadRequestOptions, ElectronPlatformArtifactDetailsWithDefaults } from './types';
export { getHostArch } from './utils';
export { initializeProxy } from './proxy';
export * from './types';
/**
* Downloads an artifact from an Electron release and returns an absolute path
* to the downloaded file.
*
* @param artifactDetails - The information required to download the artifact
*/
export declare function downloadArtifact(_artifactDetails: ElectronPlatformArtifactDetailsWithDefaults): Promise<string>;
/**
* Downloads a specific version of Electron and returns an absolute path to a
* ZIP file.
*
* @param version - The version of Electron you want to download
*/
export declare function download(version: string, options?: ElectronDownloadRequestOptions): Promise<string>;

@ -1,140 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const debug_1 = require("debug");
const fs = require("fs-extra");
const path = require("path");
const semver = require("semver");
const sumchecker = require("sumchecker");
const artifact_utils_1 = require("./artifact-utils");
const Cache_1 = require("./Cache");
const downloader_resolver_1 = require("./downloader-resolver");
const proxy_1 = require("./proxy");
const utils_1 = require("./utils");
var utils_2 = require("./utils");
exports.getHostArch = utils_2.getHostArch;
var proxy_2 = require("./proxy");
exports.initializeProxy = proxy_2.initializeProxy;
const d = debug_1.default('@electron/get:index');
if (process.env.ELECTRON_GET_USE_PROXY) {
proxy_1.initializeProxy();
}
async function validateArtifact(artifactDetails, downloadedAssetPath, _downloadArtifact) {
return await utils_1.withTempDirectoryIn(artifactDetails.tempDirectory, async (tempFolder) => {
// Don't try to verify the hash of the hash file itself
// and for older versions that don't have a SHASUMS256.txt
if (!artifactDetails.artifactName.startsWith('SHASUMS256') &&
!artifactDetails.unsafelyDisableChecksums &&
semver.gte(artifactDetails.version, '1.3.2')) {
let shasumPath;
const checksums = artifactDetails.checksums;
if (checksums) {
shasumPath = path.resolve(tempFolder, 'SHASUMS256.txt');
const fileNames = Object.keys(checksums);
if (fileNames.length === 0) {
throw new Error('Provided "checksums" object is empty, cannot generate a valid SHASUMS256.txt');
}
const generatedChecksums = fileNames
.map(fileName => `${checksums[fileName]} *${fileName}`)
.join('\n');
await fs.writeFile(shasumPath, generatedChecksums);
}
else {
shasumPath = await _downloadArtifact({
isGeneric: true,
version: artifactDetails.version,
artifactName: 'SHASUMS256.txt',
force: artifactDetails.force,
downloadOptions: artifactDetails.downloadOptions,
cacheRoot: artifactDetails.cacheRoot,
downloader: artifactDetails.downloader,
mirrorOptions: artifactDetails.mirrorOptions,
});
}
// For versions 1.3.2 - 1.3.4, need to overwrite the `defaultTextEncoding` option:
// https://github.com/electron/electron/pull/6676#discussion_r75332120
if (semver.satisfies(artifactDetails.version, '1.3.2 - 1.3.4')) {
const validatorOptions = {};
validatorOptions.defaultTextEncoding = 'binary';
const checker = new sumchecker.ChecksumValidator('sha256', shasumPath, validatorOptions);
await checker.validate(path.dirname(downloadedAssetPath), path.basename(downloadedAssetPath));
}
else {
await sumchecker('sha256', shasumPath, path.dirname(downloadedAssetPath), [
path.basename(downloadedAssetPath),
]);
}
}
});
}
/**
* Downloads an artifact from an Electron release and returns an absolute path
* to the downloaded file.
*
* @param artifactDetails - The information required to download the artifact
*/
async function downloadArtifact(_artifactDetails) {
const artifactDetails = Object.assign({}, _artifactDetails);
if (!_artifactDetails.isGeneric) {
const platformArtifactDetails = artifactDetails;
if (!platformArtifactDetails.platform) {
d('No platform found, defaulting to the host platform');
platformArtifactDetails.platform = process.platform;
}
if (platformArtifactDetails.arch) {
platformArtifactDetails.arch = utils_1.getNodeArch(platformArtifactDetails.arch);
}
else {
d('No arch found, defaulting to the host arch');
platformArtifactDetails.arch = utils_1.getHostArch();
}
}
utils_1.ensureIsTruthyString(artifactDetails, 'version');
artifactDetails.version = artifact_utils_1.getArtifactVersion(artifactDetails);
const fileName = artifact_utils_1.getArtifactFileName(artifactDetails);
const url = await artifact_utils_1.getArtifactRemoteURL(artifactDetails);
const cache = new Cache_1.Cache(artifactDetails.cacheRoot);
// Do not check if the file exists in the cache when force === true
if (!artifactDetails.force) {
d(`Checking the cache (${artifactDetails.cacheRoot}) for ${fileName} (${url})`);
const cachedPath = await cache.getPathForFileInCache(url, fileName);
if (cachedPath === null) {
d('Cache miss');
}
else {
d('Cache hit');
try {
await validateArtifact(artifactDetails, cachedPath, downloadArtifact);
return cachedPath;
}
catch (err) {
d("Artifact in cache didn't match checksums", err);
d('falling back to re-download');
}
}
}
if (!artifactDetails.isGeneric &&
utils_1.isOfficialLinuxIA32Download(artifactDetails.platform, artifactDetails.arch, artifactDetails.version, artifactDetails.mirrorOptions)) {
console.warn('Official Linux/ia32 support is deprecated.');
console.warn('For more info: https://electronjs.org/blog/linux-32bit-support');
}
return await utils_1.withTempDirectoryIn(artifactDetails.tempDirectory, async (tempFolder) => {
const tempDownloadPath = path.resolve(tempFolder, artifact_utils_1.getArtifactFileName(artifactDetails));
const downloader = artifactDetails.downloader || (await downloader_resolver_1.getDownloaderForSystem());
d(`Downloading ${url} to ${tempDownloadPath} with options: ${JSON.stringify(artifactDetails.downloadOptions)}`);
await downloader.download(url, tempDownloadPath, artifactDetails.downloadOptions);
await validateArtifact(artifactDetails, tempDownloadPath, downloadArtifact);
return await cache.putFileInCache(url, tempDownloadPath, fileName);
});
}
exports.downloadArtifact = downloadArtifact;
/**
* Downloads a specific version of Electron and returns an absolute path to a
* ZIP file.
*
* @param version - The version of Electron you want to download
*/
function download(version, options) {
return downloadArtifact(Object.assign(Object.assign({}, options), { version, platform: process.platform, arch: process.arch, artifactName: 'electron' }));
}
exports.download = download;
//# sourceMappingURL=index.js.map

@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;AAAA,iCAA0B;AAC1B,+BAA+B;AAC/B,6BAA6B;AAC7B,iCAAiC;AACjC,yCAAyC;AAEzC,qDAAiG;AAOjG,mCAAgC;AAChC,+DAA+D;AAC/D,mCAA0C;AAC1C,mCAOiB;AAEjB,iCAAsC;AAA7B,8BAAA,WAAW,CAAA;AACpB,iCAA0C;AAAjC,kCAAA,eAAe,CAAA;AAGxB,MAAM,CAAC,GAAG,eAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC,IAAI,OAAO,CAAC,GAAG,CAAC,sBAAsB,EAAE;IACtC,uBAAe,EAAE,CAAC;CACnB;AAMD,KAAK,UAAU,gBAAgB,CAC7B,eAAwC,EACxC,mBAA2B,EAC3B,iBAAqC;IAErC,OAAO,MAAM,2BAAmB,CAAC,eAAe,CAAC,aAAa,EAAE,KAAK,EAAC,UAAU,EAAC,EAAE;QACjF,uDAAuD;QACvD,0DAA0D;QAC1D,IACE,CAAC,eAAe,CAAC,YAAY,CAAC,UAAU,CAAC,YAAY,CAAC;YACtD,CAAC,eAAe,CAAC,wBAAwB;YACzC,MAAM,CAAC,GAAG,CAAC,eAAe,CAAC,OAAO,EAAE,OAAO,CAAC,EAC5C;YACA,IAAI,UAAkB,CAAC;YACvB,MAAM,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;YAC5C,IAAI,SAAS,EAAE;gBACb,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBACxD,MAAM,SAAS,GAAa,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;gBACnD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;oBAC1B,MAAM,IAAI,KAAK,CACb,8EAA8E,CAC/E,CAAC;iBACH;gBACD,MAAM,kBAAkB,GAAG,SAAS;qBACjC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,KAAK,QAAQ,EAAE,CAAC;qBACtD,IAAI,CAAC,IAAI,CAAC,CAAC;gBACd,MAAM,EAAE,CAAC,SAAS,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;aACpD;iBAAM;gBACL,UAAU,GAAG,MAAM,iBAAiB,CAAC;oBACnC,SAAS,EAAE,IAAI;oBACf,OAAO,EAAE,eAAe,CAAC,OAAO;oBAChC,YAAY,EAAE,gBAAgB;oBAC9B,KAAK,EAAE,eAAe,CAAC,KAAK;oBAC5B,eAAe,EAAE,eAAe,CAAC,eAAe;oBAChD,SAAS,EAAE,eAAe,CAAC,SAAS;oBACpC,UAAU,EAAE,eAAe,CAAC,UAAU;oBACtC,aAAa,EAAE,eAAe,CAAC,aAAa;iBAC7C,CAAC,CAAC;aACJ;YAED,kFAAkF;YAClF,sEAAsE;YACtE,IAAI,MAAM,CAAC,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,eAAe,CAAC,EAAE;gBAC9D,MAAM,gBAAgB,GAA+B,EAAE,CAAC;gBACxD,gBAAgB,CAAC,mBAAmB,GAAG,QAAQ,CAAC;gBAChD,MAAM,OAAO,GAAG,IAAI,UAAU,CAAC,iBAAiB,CAAC,QAAQ,EAAE,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBACzF,MAAM,OAAO,CAAC,QAAQ,CACpB,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,EACjC,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CACnC,CAAC;aACH;iBAAM;gBACL,MAAM,UAAU,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;oBACxE,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC;iBACnC,CAAC,CAAC;aACJ;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;GAKG;AACI,KAAK,UAAU,gBAAgB,CACpC,gBAA6D;IAE7D,MAAM,eAAe,qBACf,gBAA4C,CACjD,CAAC;IACF,IAAI,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC/B,MAAM,uBAAuB,GAAG,eAAkD,CAAC;QACnF,IAAI,CAAC,uBAAuB,CAAC,QAAQ,EAAE;YACrC,CAAC,CAAC,oDAAoD,CAAC,CAAC;YACxD,uBAAuB,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;SACrD;QACD,IAAI,uBAAuB,CAAC,IAAI,EAAE;YAChC,uBAAuB,CAAC,IAAI,GAAG,mBAAW,CAAC,uBAAuB,CAAC,IAAI,CAAC,CAAC;SAC1E;aAAM;YACL,CAAC,CAAC,4CAA4C,CAAC,CAAC;YAChD,uBAAuB,CAAC,IAAI,GAAG,mBAAW,EAAE,CAAC;SAC9C;KACF;IACD,4BAAoB,CAAC,eAAe,EAAE,SAAS,CAAC,CAAC;IAEjD,eAAe,CAAC,OAAO,GAAG,mCAAkB,CAAC,eAAe,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,oCAAmB,CAAC,eAAe,CAAC,CAAC;IACtD,MAAM,GAAG,GAAG,MAAM,qCAAoB,CAAC,eAAe,CAAC,CAAC;IACxD,MAAM,KAAK,GAAG,IAAI,aAAK,CAAC,eAAe,CAAC,SAAS,CAAC,CAAC;IAEnD,mEAAmE;IACnE,IAAI,CAAC,eAAe,CAAC,KAAK,EAAE;QAC1B,CAAC,CAAC,uBAAuB,eAAe,CAAC,SAAS,SAAS,QAAQ,KAAK,GAAG,GAAG,CAAC,CAAC;QAChF,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,qBAAqB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QAEpE,IAAI,UAAU,KAAK,IAAI,EAAE;YACvB,CAAC,CAAC,YAAY,CAAC,CAAC;SACjB;aAAM;YACL,CAAC,CAAC,WAAW,CAAC,CAAC;YACf,IAAI;gBACF,MAAM,gBAAgB,CAAC,eAAe,EAAE,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAEtE,OAAO,UAAU,CAAC;aACnB;YAAC,OAAO,GAAG,EAAE;gBACZ,CAAC,CAAC,0CAA0C,EAAE,GAAG,CAAC,CAAC;gBACnD,CAAC,CAAC,6BAA6B,CAAC,CAAC;aAClC;SACF;KACF;IAED,IACE,CAAC,eAAe,CAAC,SAAS;QAC1B,mCAA2B,CACzB,eAAe,CAAC,QAAQ,EACxB,eAAe,CAAC,IAAI,EACpB,eAAe,CAAC,OAAO,EACvB,eAAe,CAAC,aAAa,CAC9B,EACD;QACA,OAAO,CAAC,IAAI,CAAC,4CAA4C,CAAC,CAAC;QAC3D,OAAO,CAAC,IAAI,CAAC,gEAAgE,CAAC,CAAC;KAChF
;IAED,OAAO,MAAM,2BAAmB,CAAC,eAAe,CAAC,aAAa,EAAE,KAAK,EAAC,UAAU,EAAC,EAAE;QACjF,MAAM,gBAAgB,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,oCAAmB,CAAC,eAAe,CAAC,CAAC,CAAC;QAExF,MAAM,UAAU,GAAG,eAAe,CAAC,UAAU,IAAI,CAAC,MAAM,4CAAsB,EAAE,CAAC,CAAC;QAClF,CAAC,CACC,eAAe,GAAG,OAAO,gBAAgB,kBAAkB,IAAI,CAAC,SAAS,CACvE,eAAe,CAAC,eAAe,CAChC,EAAE,CACJ,CAAC;QACF,MAAM,UAAU,CAAC,QAAQ,CAAC,GAAG,EAAE,gBAAgB,EAAE,eAAe,CAAC,eAAe,CAAC,CAAC;QAElF,MAAM,gBAAgB,CAAC,eAAe,EAAE,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;QAE5E,OAAO,MAAM,KAAK,CAAC,cAAc,CAAC,GAAG,EAAE,gBAAgB,EAAE,QAAQ,CAAC,CAAC;IACrE,CAAC,CAAC,CAAC;AACL,CAAC;AA1ED,4CA0EC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CACtB,OAAe,EACf,OAAwC;IAExC,OAAO,gBAAgB,iCAClB,OAAO,KACV,OAAO,EACP,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,IAAI,EAAE,OAAO,CAAC,IAAI,EAClB,YAAY,EAAE,UAAU,IACxB,CAAC;AACL,CAAC;AAXD,4BAWC"}

@ -1,4 +0,0 @@
/**
* Initializes a third-party proxy module for HTTP(S) requests.
*/
export declare function initializeProxy(): void;

@ -1,27 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const debug = require("debug");
const utils_1 = require("./utils");
const d = debug('@electron/get:proxy');
/**
* Initializes a third-party proxy module for HTTP(S) requests.
*/
function initializeProxy() {
try {
// See: https://github.com/electron/get/pull/214#discussion_r798845713
const env = utils_1.getEnv('GLOBAL_AGENT_');
utils_1.setEnv('GLOBAL_AGENT_HTTP_PROXY', env('HTTP_PROXY'));
utils_1.setEnv('GLOBAL_AGENT_HTTPS_PROXY', env('HTTPS_PROXY'));
utils_1.setEnv('GLOBAL_AGENT_NO_PROXY', env('NO_PROXY'));
/**
* TODO: replace global-agent with a hpagent. @BlackHole1
* https://github.com/sindresorhus/got/blob/HEAD/documentation/tips.md#proxying
*/
require('global-agent').bootstrap();
}
catch (e) {
d('Could not load either proxy modules, built-in proxy support not available:', e);
}
}
exports.initializeProxy = initializeProxy;
//# sourceMappingURL=proxy.js.map

@ -1 +0,0 @@
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../../src/proxy.ts"],"names":[],"mappings":";;AAAA,+BAA+B;AAC/B,mCAAyC;AAEzC,MAAM,CAAC,GAAG,KAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC;;GAEG;AACH,SAAgB,eAAe;IAC7B,IAAI;QACF,sEAAsE;QACtE,MAAM,GAAG,GAAG,cAAM,CAAC,eAAe,CAAC,CAAC;QAEpC,cAAM,CAAC,yBAAyB,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;QACrD,cAAM,CAAC,0BAA0B,EAAE,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;QACvD,cAAM,CAAC,uBAAuB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC;QAEjD;;;WAGG;QACH,OAAO,CAAC,cAAc,CAAC,CAAC,SAAS,EAAE,CAAC;KACrC;IAAC,OAAO,CAAC,EAAE;QACV,CAAC,CAAC,4EAA4E,EAAE,CAAC,CAAC,CAAC;KACpF;AACH,CAAC;AAjBD,0CAiBC"}

@ -1,129 +0,0 @@
import { Downloader } from './Downloader';
export declare type DownloadOptions = any;
export interface MirrorOptions {
/**
* DEPRECATED - see nightlyMirror.
*/
nightly_mirror?: string;
/**
* The Electron nightly-specific mirror URL.
*/
nightlyMirror?: string;
/**
* The base URL of the mirror to download from,
* e.g https://github.com/electron/electron/releases/download
*/
mirror?: string;
/**
* The name of the directory to download from,
* often scoped by version number e.g 'v4.0.4'
*/
customDir?: string;
/**
* The name of the asset to download,
* e.g 'electron-v4.0.4-linux-x64.zip'
*/
customFilename?: string;
/**
* The version of the asset to download,
* e.g '4.0.4'
*/
customVersion?: string;
/**
* A function allowing customization of the url returned
* from getArtifactRemoteURL().
*/
resolveAssetURL?: (opts: DownloadOptions) => Promise<string>;
}
export interface ElectronDownloadRequest {
/**
* The version of Electron associated with the artifact.
*/
version: string;
/**
* The type of artifact. For example:
* * `electron`
* * `ffmpeg`
*/
artifactName: string;
}
export interface ElectronDownloadRequestOptions {
/**
* Whether to download an artifact regardless of whether it's in the cache directory.
*
* Defaults to `false`.
*/
force?: boolean;
/**
* When set to `true`, disables checking that the artifact download completed successfully
* with the correct payload.
*
* Defaults to `false`.
*/
unsafelyDisableChecksums?: boolean;
/**
* Provides checksums for the artifact as strings.
* Can be used if you already know the checksums of the Electron artifact
* you are downloading and want to skip the checksum file download
* without skipping the checksum validation.
*
* This should be an object whose keys are the file names of the artifacts and
* the values are their respective SHA256 checksums.
*/
checksums?: Record<string, string>;
/**
* The directory that caches Electron artifact downloads.
*
* The default value is dependent upon the host platform:
*
* * Linux: `$XDG_CACHE_HOME` or `~/.cache/electron/`
* * MacOS: `~/Library/Caches/electron/`
* * Windows: `%LOCALAPPDATA%/electron/Cache` or `~/AppData/Local/electron/Cache/`
*/
cacheRoot?: string;
/**
* Options passed to the downloader module.
*/
downloadOptions?: DownloadOptions;
/**
* Options related to specifying an artifact mirror.
*/
mirrorOptions?: MirrorOptions;
/**
* The custom [[Downloader]] class used to download artifacts. Defaults to the
* built-in [[GotDownloader]].
*/
downloader?: Downloader<DownloadOptions>;
/**
* A temporary directory for downloads.
* It is used before artifacts are put into cache.
*/
tempDirectory?: string;
}
export declare type ElectronPlatformArtifactDetails = {
/**
* The target artifact platform. These are Node-style platform names, for example:
* * `win32`
* * `darwin`
* * `linux`
*/
platform: string;
/**
* The target artifact architecture. These are Node-style architecture names, for example:
* * `ia32`
* * `x64`
* * `armv7l`
*/
arch: string;
artifactSuffix?: string;
isGeneric?: false;
} & ElectronDownloadRequest & ElectronDownloadRequestOptions;
export declare type ElectronGenericArtifactDetails = {
isGeneric: true;
} & ElectronDownloadRequest & ElectronDownloadRequestOptions;
export declare type ElectronArtifactDetails = ElectronPlatformArtifactDetails | ElectronGenericArtifactDetails;
export declare type Omit<T, K> = Pick<T, Exclude<keyof T, K>>;
export declare type ElectronPlatformArtifactDetailsWithDefaults = (Omit<ElectronPlatformArtifactDetails, 'platform' | 'arch'> & {
platform?: string;
arch?: string;
}) | ElectronGenericArtifactDetails;

@ -1,3 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=types.js.map

@ -1 +0,0 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":""}

@ -1,25 +0,0 @@
export declare function withTempDirectoryIn<T>(parentDirectory: string | undefined, fn: (directory: string) => Promise<T>): Promise<T>;
export declare function withTempDirectory<T>(fn: (directory: string) => Promise<T>): Promise<T>;
export declare function normalizeVersion(version: string): string;
/**
* Runs the `uname` command and returns the trimmed output.
*/
export declare function uname(): string;
/**
* Generates an architecture name that would be used in an Electron or Node.js
* download file name.
*/
export declare function getNodeArch(arch: string): string;
/**
* Generates an architecture name that would be used in an Electron or Node.js
* download file name, from the `process` module information.
*/
export declare function getHostArch(): string;
export declare function ensureIsTruthyString<T, K extends keyof T>(obj: T, key: K): void;
export declare function isOfficialLinuxIA32Download(platform: string, arch: string, version: string, mirrorOptions?: object): boolean;
/**
* Find the value of a environment variable which may or may not have the
* prefix, in a case-insensitive manner.
*/
export declare function getEnv(prefix?: string): (name: string) => string | undefined;
export declare function setEnv(key: string, value: string | undefined): void;

@ -1,107 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const childProcess = require("child_process");
const fs = require("fs-extra");
const os = require("os");
const path = require("path");
async function useAndRemoveDirectory(directory, fn) {
let result;
try {
result = await fn(directory);
}
finally {
await fs.remove(directory);
}
return result;
}
async function withTempDirectoryIn(parentDirectory = os.tmpdir(), fn) {
const tempDirectoryPrefix = 'electron-download-';
const tempDirectory = await fs.mkdtemp(path.resolve(parentDirectory, tempDirectoryPrefix));
return useAndRemoveDirectory(tempDirectory, fn);
}
exports.withTempDirectoryIn = withTempDirectoryIn;
async function withTempDirectory(fn) {
return withTempDirectoryIn(undefined, fn);
}
exports.withTempDirectory = withTempDirectory;
function normalizeVersion(version) {
if (!version.startsWith('v')) {
return `v${version}`;
}
return version;
}
exports.normalizeVersion = normalizeVersion;
/**
* Runs the `uname` command and returns the trimmed output.
*/
function uname() {
return childProcess
.execSync('uname -m')
.toString()
.trim();
}
exports.uname = uname;
/**
* Generates an architecture name that would be used in an Electron or Node.js
* download file name.
*/
function getNodeArch(arch) {
if (arch === 'arm') {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
switch (process.config.variables.arm_version) {
case '6':
return uname();
case '7':
default:
return 'armv7l';
}
}
return arch;
}
exports.getNodeArch = getNodeArch;
/**
* Generates an architecture name that would be used in an Electron or Node.js
* download file name, from the `process` module information.
*/
function getHostArch() {
return getNodeArch(process.arch);
}
exports.getHostArch = getHostArch;
function ensureIsTruthyString(obj, key) {
if (!obj[key] || typeof obj[key] !== 'string') {
throw new Error(`Expected property "${key}" to be provided as a string but it was not`);
}
}
exports.ensureIsTruthyString = ensureIsTruthyString;
function isOfficialLinuxIA32Download(platform, arch, version, mirrorOptions) {
return (platform === 'linux' &&
arch === 'ia32' &&
Number(version.slice(1).split('.')[0]) >= 4 &&
typeof mirrorOptions === 'undefined');
}
exports.isOfficialLinuxIA32Download = isOfficialLinuxIA32Download;
/**
* Find the value of a environment variable which may or may not have the
* prefix, in a case-insensitive manner.
*/
function getEnv(prefix = '') {
const envsLowerCase = {};
for (const envKey in process.env) {
envsLowerCase[envKey.toLowerCase()] = process.env[envKey];
}
return (name) => {
return (envsLowerCase[`${prefix}${name}`.toLowerCase()] ||
envsLowerCase[name.toLowerCase()] ||
undefined);
};
}
exports.getEnv = getEnv;
function setEnv(key, value) {
// The `void` operator always returns `undefined`.
// See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/void
if (value !== void 0) {
process.env[key] = value;
}
}
exports.setEnv = setEnv;
//# sourceMappingURL=utils.js.map

@ -1 +0,0 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/utils.ts"],"names":[],"mappings":";;AAAA,8CAA8C;AAC9C,+BAA+B;AAC/B,yBAAyB;AACzB,6BAA6B;AAE7B,KAAK,UAAU,qBAAqB,CAClC,SAAiB,EACjB,EAAqC;IAErC,IAAI,MAAS,CAAC;IACd,IAAI;QACF,MAAM,GAAG,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC;KAC9B;YAAS;QACR,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;KAC5B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAEM,KAAK,UAAU,mBAAmB,CACvC,kBAA0B,EAAE,CAAC,MAAM,EAAE,EACrC,EAAqC;IAErC,MAAM,mBAAmB,GAAG,oBAAoB,CAAC;IACjD,MAAM,aAAa,GAAG,MAAM,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE,mBAAmB,CAAC,CAAC,CAAC;IAC3F,OAAO,qBAAqB,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC;AAClD,CAAC;AAPD,kDAOC;AAEM,KAAK,UAAU,iBAAiB,CAAI,EAAqC;IAC9E,OAAO,mBAAmB,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;AAC5C,CAAC;AAFD,8CAEC;AAED,SAAgB,gBAAgB,CAAC,OAAe;IAC9C,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC5B,OAAO,IAAI,OAAO,EAAE,CAAC;KACtB;IACD,OAAO,OAAO,CAAC;AACjB,CAAC;AALD,4CAKC;AAED;;GAEG;AACH,SAAgB,KAAK;IACnB,OAAO,YAAY;SAChB,QAAQ,CAAC,UAAU,CAAC;SACpB,QAAQ,EAAE;SACV,IAAI,EAAE,CAAC;AACZ,CAAC;AALD,sBAKC;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,IAAY;IACtC,IAAI,IAAI,KAAK,KAAK,EAAE;QAClB,8DAA8D;QAC9D,QAAS,OAAO,CAAC,MAAM,CAAC,SAAiB,CAAC,WAAW,EAAE;YACrD,KAAK,GAAG;gBACN,OAAO,KAAK,EAAE,CAAC;YACjB,KAAK,GAAG,CAAC;YACT;gBACE,OAAO,QAAQ,CAAC;SACnB;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAbD,kCAaC;AAED;;;GAGG;AACH,SAAgB,WAAW;IACzB,OAAO,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAFD,kCAEC;AAED,SAAgB,oBAAoB,CAAuB,GAAM,EAAE,GAAM;IACvE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,OAAO,GAAG,CAAC,GAAG,CAAC,KAAK,QAAQ,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,sBAAsB,GAAG,6CAA6C,CAAC,CAAC;KACzF;AACH,CAAC;AAJD,oDAIC;AAED,SAAgB,2BAA2B,CACzC,QAAgB,EAChB,IAAY,EACZ,OAAe,EACf,aAAsB;IAEtB,OAAO,CACL,QAAQ,KAAK,OAAO;QACpB,IAAI,KAAK,MAAM;QACf,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QAC3C,OAAO,aAAa,KAAK,WAAW,CACrC,CAAC;AACJ,CAAC;AAZD,kEAYC;AAED;;;GAGG;AACH,SAAgB,MAAM,CAAC,MAAM,GAAG,EAAE;IAChC,MAAM,aAAa,GAAsB,EAAE,CAAC;IAE5C,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,GAAG,EAAE;QAChC,aAAa,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;KAC3D;IAED,OAAO,CAAC,IAAY,EAAsB,EAAE;QAC1C,OAAO,CACL,aAAa,CAAC,GAAG,MAAM,GAAG,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;YAC/C,aAAa,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC;YACjC,SAAS,CACV,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAdD,wBAcC;AAED,SAAgB,MAAM,CAAC,GAAW,EAAE,KAAyB;IAC3D,kDAAkD;IAClD,wFAAwF;IACxF,IAAI,KAAK,KAAK,KAAK,CAAC,EAAE;QACpB,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAC1B;AACH,CAAC;AAND,wBAMC"}

@ -1,8 +0,0 @@
export declare class Cache {
private cacheRoot;
constructor(cacheRoot?: string);
static getCacheDirectory(downloadUrl: string): string;
getCachePath(downloadUrl: string, fileName: string): string;
getPathForFileInCache(url: string, fileName: string): Promise<string | null>;
putFileInCache(url: string, currentPath: string, fileName: string): Promise<string>;
}

@ -1,57 +0,0 @@
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
import debug from 'debug';
import envPaths from 'env-paths';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as url from 'url';
import * as crypto from 'crypto';
const d = debug('@electron/get:cache');
const defaultCacheRoot = envPaths('electron', {
suffix: '',
}).cache;
export class Cache {
constructor(cacheRoot = defaultCacheRoot) {
this.cacheRoot = cacheRoot;
}
static getCacheDirectory(downloadUrl) {
const parsedDownloadUrl = url.parse(downloadUrl);
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { search, hash, pathname } = parsedDownloadUrl, rest = __rest(parsedDownloadUrl, ["search", "hash", "pathname"]);
const strippedUrl = url.format(Object.assign(Object.assign({}, rest), { pathname: path.dirname(pathname || 'electron') }));
return crypto
.createHash('sha256')
.update(strippedUrl)
.digest('hex');
}
getCachePath(downloadUrl, fileName) {
return path.resolve(this.cacheRoot, Cache.getCacheDirectory(downloadUrl), fileName);
}
async getPathForFileInCache(url, fileName) {
const cachePath = this.getCachePath(url, fileName);
if (await fs.pathExists(cachePath)) {
return cachePath;
}
return null;
}
async putFileInCache(url, currentPath, fileName) {
const cachePath = this.getCachePath(url, fileName);
d(`Moving ${currentPath} to ${cachePath}`);
if (await fs.pathExists(cachePath)) {
d('* Replacing existing file');
await fs.remove(cachePath);
}
await fs.move(currentPath, cachePath);
return cachePath;
}
}
//# sourceMappingURL=Cache.js.map

@ -1 +0,0 @@
{"version":3,"file":"Cache.js","sourceRoot":"","sources":["../../src/Cache.ts"],"names":[],"mappings":";;;;;;;;;;;AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,QAAQ,MAAM,WAAW,CAAC;AACjC,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,GAAG,MAAM,KAAK,CAAC;AAC3B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC,MAAM,CAAC,GAAG,KAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC,MAAM,gBAAgB,GAAG,QAAQ,CAAC,UAAU,EAAE;IAC5C,MAAM,EAAE,EAAE;CACX,CAAC,CAAC,KAAK,CAAC;AAET,MAAM,OAAO,KAAK;IAChB,YAAoB,YAAY,gBAAgB;QAA5B,cAAS,GAAT,SAAS,CAAmB;IAAG,CAAC;IAE7C,MAAM,CAAC,iBAAiB,CAAC,WAAmB;QACjD,MAAM,iBAAiB,GAAG,GAAG,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QACjD,6DAA6D;QAC7D,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,KAAc,iBAAiB,EAA7B,gEAA6B,CAAC;QAC9D,MAAM,WAAW,GAAG,GAAG,CAAC,MAAM,iCAAM,IAAI,KAAE,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,UAAU,CAAC,IAAG,CAAC;QAE5F,OAAO,MAAM;aACV,UAAU,CAAC,QAAQ,CAAC;aACpB,MAAM,CAAC,WAAW,CAAC;aACnB,MAAM,CAAC,KAAK,CAAC,CAAC;IACnB,CAAC;IAEM,YAAY,CAAC,WAAmB,EAAE,QAAgB;QACvD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,iBAAiB,CAAC,WAAW,CAAC,EAAE,QAAQ,CAAC,CAAC;IACtF,CAAC;IAEM,KAAK,CAAC,qBAAqB,CAAC,GAAW,EAAE,QAAgB;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAClC,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,KAAK,CAAC,cAAc,CAAC,GAAW,EAAE,WAAmB,EAAE,QAAgB;QAC5E,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACnD,CAAC,CAAC,UAAU,WAAW,OAAO,SAAS,EAAE,CAAC,CAAC;QAC3C,IAAI,MAAM,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAClC,CAAC,CAAC,2BAA2B,CAAC,CAAC;YAC/B,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SAC5B;QAED,MAAM,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;QAEtC,OAAO,SAAS,CAAC;IACnB,CAAC;CACF"}

@ -1,3 +0,0 @@
export interface Downloader<T> {
download(url: string, targetFilePath: string, options: T): Promise<void>;
}

@ -1 +0,0 @@
//# sourceMappingURL=Downloader.js.map

@ -1 +0,0 @@
{"version":3,"file":"Downloader.js","sourceRoot":"","sources":["../../src/Downloader.ts"],"names":[],"mappings":""}

@ -1,21 +0,0 @@
import { Progress as GotProgress, Options as GotOptions } from 'got';
import { Downloader } from './Downloader';
/**
* See [`got#options`](https://github.com/sindresorhus/got#options) for possible keys/values.
*/
export declare type GotDownloaderOptions = (GotOptions & {
isStream?: true;
}) & {
/**
* if defined, triggers every time `got`'s `downloadProgress` event callback is triggered.
*/
getProgressCallback?: (progress: GotProgress) => Promise<void>;
/**
* if `true`, disables the console progress bar (setting the `ELECTRON_GET_NO_PROGRESS`
* environment variable to a non-empty value also does this).
*/
quiet?: boolean;
};
export declare class GotDownloader implements Downloader<GotDownloaderOptions> {
download(url: string, targetFilePath: string, options?: GotDownloaderOptions): Promise<void>;
}

@ -1,73 +0,0 @@
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
import * as fs from 'fs-extra';
import got, { HTTPError } from 'got';
import * as path from 'path';
import * as ProgressBar from 'progress';
const PROGRESS_BAR_DELAY_IN_SECONDS = 30;
export class GotDownloader {
async download(url, targetFilePath, options) {
if (!options) {
options = {};
}
const { quiet, getProgressCallback } = options, gotOptions = __rest(options, ["quiet", "getProgressCallback"]);
let downloadCompleted = false;
let bar;
let progressPercent;
let timeout = undefined;
await fs.mkdirp(path.dirname(targetFilePath));
const writeStream = fs.createWriteStream(targetFilePath);
if (!quiet || !process.env.ELECTRON_GET_NO_PROGRESS) {
const start = new Date();
timeout = setTimeout(() => {
if (!downloadCompleted) {
bar = new ProgressBar(`Downloading ${path.basename(url)}: [:bar] :percent ETA: :eta seconds `, {
curr: progressPercent,
total: 100,
});
// https://github.com/visionmedia/node-progress/issues/159
// eslint-disable-next-line @typescript-eslint/no-explicit-any
bar.start = start;
}
}, PROGRESS_BAR_DELAY_IN_SECONDS * 1000);
}
await new Promise((resolve, reject) => {
const downloadStream = got.stream(url, gotOptions);
downloadStream.on('downloadProgress', async (progress) => {
progressPercent = progress.percent;
if (bar) {
bar.update(progress.percent);
}
if (getProgressCallback) {
await getProgressCallback(progress);
}
});
downloadStream.on('error', error => {
if (error instanceof HTTPError && error.response.statusCode === 404) {
error.message += ` for ${error.response.url}`;
}
if (writeStream.destroy) {
writeStream.destroy(error);
}
reject(error);
});
writeStream.on('error', error => reject(error));
writeStream.on('close', () => resolve());
downloadStream.pipe(writeStream);
});
downloadCompleted = true;
if (timeout) {
clearTimeout(timeout);
}
}
}
//# sourceMappingURL=GotDownloader.js.map

@ -1 +0,0 @@
{"version":3,"file":"GotDownloader.js","sourceRoot":"","sources":["../../src/GotDownloader.ts"],"names":[],"mappings":";;;;;;;;;;;AAAA,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,GAAG,EAAE,EAAE,SAAS,EAAkD,MAAM,KAAK,CAAC;AACrF,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,WAAW,MAAM,UAAU,CAAC;AAIxC,MAAM,6BAA6B,GAAG,EAAE,CAAC;AAiBzC,MAAM,OAAO,aAAa;IACxB,KAAK,CAAC,QAAQ,CACZ,GAAW,EACX,cAAsB,EACtB,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QACD,MAAM,EAAE,KAAK,EAAE,mBAAmB,KAAoB,OAAO,EAAzB,8DAAyB,CAAC;QAC9D,IAAI,iBAAiB,GAAG,KAAK,CAAC;QAC9B,IAAI,GAA4B,CAAC;QACjC,IAAI,eAAuB,CAAC;QAC5B,IAAI,OAAO,GAA+B,SAAS,CAAC;QACpD,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC;QAC9C,MAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,CAAC,cAAc,CAAC,CAAC;QAEzD,IAAI,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,IAAI,EAAE,CAAC;YACzB,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE;gBACxB,IAAI,CAAC,iBAAiB,EAAE;oBACtB,GAAG,GAAG,IAAI,WAAW,CACnB,eAAe,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,sCAAsC,EACvE;wBACE,IAAI,EAAE,eAAe;wBACrB,KAAK,EAAE,GAAG;qBACX,CACF,CAAC;oBACF,0DAA0D;oBAC1D,8DAA8D;oBAC7D,GAAW,CAAC,KAAK,GAAG,KAAK,CAAC;iBAC5B;YACH,CAAC,EAAE,6BAA6B,GAAG,IAAI,CAAC,CAAC;SAC1C;QACD,MAAM,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC1C,MAAM,cAAc,GAAG,GAAG,CAAC,MAAM,CAAC,GAAG,EAAE,UAAU,CAAC,CAAC;YACnD,cAAc,CAAC,EAAE,CAAC,kBAAkB,EAAE,KAAK,EAAC,QAAQ,EAAC,EAAE;gBACrD,eAAe,GAAG,QAAQ,CAAC,OAAO,CAAC;gBACnC,IAAI,GAAG,EAAE;oBACP,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;iBAC9B;gBACD,IAAI,mBAAmB,EAAE;oBACvB,MAAM,mBAAmB,CAAC,QAAQ,CAAC,CAAC;iBACrC;YACH,CAAC,CAAC,CAAC;YACH,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;gBACjC,IAAI,KAAK,YAAY,SAAS,IAAI,KAAK,CAAC,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;oBACnE,KAAK,CAAC,OAAO,IAAI,QAAQ,KAAK,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;iBAC/C;gBACD,IAAI,WAAW,CAAC,OAAO,EAAE;oBACvB,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;iBAC5B;gBAED,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC,CAAC,CAAC;YACH,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;YAChD,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;YAEzC,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;QAEH,iBAAiB,GAAG,IAAI,CAAC;QACzB,IAAI,OAAO,EAAE;YACX,YAAY,CAAC,OAAO,CAAC,CAAC;SACvB;IACH,CAAC;CACF"}

@ -1,4 +0,0 @@
import { ElectronArtifactDetails } from './types';
export declare function getArtifactFileName(details: ElectronArtifactDetails): string;
export declare function getArtifactRemoteURL(details: ElectronArtifactDetails): Promise<string>;
export declare function getArtifactVersion(details: ElectronArtifactDetails): string;

@ -1,61 +0,0 @@
import { ensureIsTruthyString, normalizeVersion } from './utils';
const BASE_URL = 'https://github.com/electron/electron/releases/download/';
const NIGHTLY_BASE_URL = 'https://github.com/electron/nightlies/releases/download/';
export function getArtifactFileName(details) {
ensureIsTruthyString(details, 'artifactName');
if (details.isGeneric) {
return details.artifactName;
}
ensureIsTruthyString(details, 'arch');
ensureIsTruthyString(details, 'platform');
ensureIsTruthyString(details, 'version');
return `${[
details.artifactName,
details.version,
details.platform,
details.arch,
...(details.artifactSuffix ? [details.artifactSuffix] : []),
].join('-')}.zip`;
}
function mirrorVar(name, options, defaultValue) {
// Convert camelCase to camel_case for env var reading
const snakeName = name.replace(/([a-z])([A-Z])/g, (_, a, b) => `${a}_${b}`).toLowerCase();
return (
// .npmrc
process.env[`npm_config_electron_${name.toLowerCase()}`] ||
process.env[`NPM_CONFIG_ELECTRON_${snakeName.toUpperCase()}`] ||
process.env[`npm_config_electron_${snakeName}`] ||
// package.json
process.env[`npm_package_config_electron_${name}`] ||
process.env[`npm_package_config_electron_${snakeName.toLowerCase()}`] ||
// env
process.env[`ELECTRON_${snakeName.toUpperCase()}`] ||
options[name] ||
defaultValue);
}
export async function getArtifactRemoteURL(details) {
const opts = details.mirrorOptions || {};
let base = mirrorVar('mirror', opts, BASE_URL);
if (details.version.includes('nightly')) {
const nightlyDeprecated = mirrorVar('nightly_mirror', opts, '');
if (nightlyDeprecated) {
base = nightlyDeprecated;
console.warn(`nightly_mirror is deprecated, please use nightlyMirror`);
}
else {
base = mirrorVar('nightlyMirror', opts, NIGHTLY_BASE_URL);
}
}
const path = mirrorVar('customDir', opts, details.version).replace('{{ version }}', details.version.replace(/^v/, ''));
const file = mirrorVar('customFilename', opts, getArtifactFileName(details));
// Allow customized download URL resolution.
if (opts.resolveAssetURL) {
const url = await opts.resolveAssetURL(details);
return url;
}
return `${base}${path}/${file}`;
}
export function getArtifactVersion(details) {
return normalizeVersion(mirrorVar('customVersion', details.mirrorOptions || {}, details.version));
}
//# sourceMappingURL=artifact-utils.js.map

@ -1 +0,0 @@
{"version":3,"file":"artifact-utils.js","sourceRoot":"","sources":["../../src/artifact-utils.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,oBAAoB,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAEjE,MAAM,QAAQ,GAAG,yDAAyD,CAAC;AAC3E,MAAM,gBAAgB,GAAG,0DAA0D,CAAC;AAEpF,MAAM,UAAU,mBAAmB,CAAC,OAAgC;IAClE,oBAAoB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;IAE9C,IAAI,OAAO,CAAC,SAAS,EAAE;QACrB,OAAO,OAAO,CAAC,YAAY,CAAC;KAC7B;IAED,oBAAoB,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACtC,oBAAoB,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;IAC1C,oBAAoB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;IAEzC,OAAO,GAAG;QACR,OAAO,CAAC,YAAY;QACpB,OAAO,CAAC,OAAO;QACf,OAAO,CAAC,QAAQ;QAChB,OAAO,CAAC,IAAI;QACZ,GAAG,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;KAC5D,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC;AACpB,CAAC;AAED,SAAS,SAAS,CAChB,IAAkD,EAClD,OAAsB,EACtB,YAAoB;IAEpB,sDAAsD;IACtD,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;IAE1F,OAAO;IACL,SAAS;IACT,OAAO,CAAC,GAAG,CAAC,uBAAuB,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;QACxD,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,EAAE,CAAC;QAC/C,eAAe;QACf,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,EAAE,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,+BAA+B,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QACrE,MAAM;QACN,OAAO,CAAC,GAAG,CAAC,YAAY,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QAClD,OAAO,CAAC,IAAI,CAAC;QACb,YAAY,CACb,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,oBAAoB,CAAC,OAAgC;IACzE,MAAM,IAAI,GAAkB,OAAO,CAAC,aAAa,IAAI,EAAE,CAAC;IACxD,IAAI,IAAI,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;IAC/C,IAAI,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACvC,MAAM,iBAAiB,GAAG,SAAS,CAAC,gBAAgB,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC;QAChE,IAAI,iBAAiB,EAAE;YACrB,IAAI,GAAG,iBAAiB,CAAC;YACzB,OAAO,CAAC,IAAI,CAAC,wDAAwD,CAAC,CAAC;SACxE;aAAM;YACL,IAAI,GAAG,SAAS,CAAC,eAAe,EAAE,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAC3D;KACF;IACD,MAAM,IAAI,GAAG,SAAS,CAAC,WAAW,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,OAAO,CAChE,eAAe,EACf,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAClC,CAAC;IACF,MAAM,IAAI,GAAG,SAAS,CAAC,gBAAgB,EAAE,IAAI,EAAE,mBAAmB,CAAC,OAAO,CAAC,CAAC,CAAC;IAE7E,4CAA4C;IAC5C,IAAI,IAAI,CAAC,eAAe,EAAE;QACxB,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,CAAC;QAChD,OAAO,GAAG,CAAC;KACZ;IAED,OAAO,GAAG,IAAI,GAAG,IAAI,IAAI,IAAI,EAAE,CAAC;AAClC,CAAC;AAED,MAAM,UAAU,kBAAkB,CAAC,OAAgC;IACjE,OAAO,gBAAgB,CAAC,SAAS,CAAC,eAAe,EAAE,OAAO,CAAC,aAAa,IAAI,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;AACpG,CAAC"}

@ -1,3 +0,0 @@
import { DownloadOptions } from './types';
import { Downloader } from './Downloader';
export declare function getDownloaderForSystem(): Promise<Downloader<DownloadOptions>>;

@ -1,9 +0,0 @@
export async function getDownloaderForSystem() {
// TODO: Resolve the downloader or default to GotDownloader
// Current thoughts are a dot-file traversal for something like
// ".electron.downloader" which would be a text file with the name of the
// npm module to import() and use as the downloader
const { GotDownloader } = await import('./GotDownloader');
return new GotDownloader();
}
//# sourceMappingURL=downloader-resolver.js.map

@ -1 +0,0 @@
{"version":3,"file":"downloader-resolver.js","sourceRoot":"","sources":["../../src/downloader-resolver.ts"],"names":[],"mappings":"AAGA,MAAM,CAAC,KAAK,UAAU,sBAAsB;IAC1C,2DAA2D;IAC3D,+DAA+D;IAC/D,yEAAyE;IACzE,mDAAmD;IACnD,MAAM,EAAE,aAAa,EAAE,GAAG,MAAM,MAAM,CAAC,iBAAiB,CAAC,CAAC;IAC1D,OAAO,IAAI,aAAa,EAAE,CAAC;AAC7B,CAAC"}

@ -1,18 +0,0 @@
import { ElectronDownloadRequestOptions, ElectronPlatformArtifactDetailsWithDefaults } from './types';
export { getHostArch } from './utils';
export { initializeProxy } from './proxy';
export * from './types';
/**
* Downloads an artifact from an Electron release and returns an absolute path
* to the downloaded file.
*
* @param artifactDetails - The information required to download the artifact
*/
export declare function downloadArtifact(_artifactDetails: ElectronPlatformArtifactDetailsWithDefaults): Promise<string>;
/**
* Downloads a specific version of Electron and returns an absolute path to a
* ZIP file.
*
* @param version - The version of Electron you want to download
*/
export declare function download(version: string, options?: ElectronDownloadRequestOptions): Promise<string>;

@ -1,134 +0,0 @@
import debug from 'debug';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as semver from 'semver';
import * as sumchecker from 'sumchecker';
import { getArtifactFileName, getArtifactRemoteURL, getArtifactVersion } from './artifact-utils';
import { Cache } from './Cache';
import { getDownloaderForSystem } from './downloader-resolver';
import { initializeProxy } from './proxy';
import { withTempDirectoryIn, getHostArch, getNodeArch, ensureIsTruthyString, isOfficialLinuxIA32Download, } from './utils';
export { getHostArch } from './utils';
export { initializeProxy } from './proxy';
const d = debug('@electron/get:index');
if (process.env.ELECTRON_GET_USE_PROXY) {
initializeProxy();
}
async function validateArtifact(artifactDetails, downloadedAssetPath, _downloadArtifact) {
return await withTempDirectoryIn(artifactDetails.tempDirectory, async (tempFolder) => {
// Don't try to verify the hash of the hash file itself
// and for older versions that don't have a SHASUMS256.txt
if (!artifactDetails.artifactName.startsWith('SHASUMS256') &&
!artifactDetails.unsafelyDisableChecksums &&
semver.gte(artifactDetails.version, '1.3.2')) {
let shasumPath;
const checksums = artifactDetails.checksums;
if (checksums) {
shasumPath = path.resolve(tempFolder, 'SHASUMS256.txt');
const fileNames = Object.keys(checksums);
if (fileNames.length === 0) {
throw new Error('Provided "checksums" object is empty, cannot generate a valid SHASUMS256.txt');
}
const generatedChecksums = fileNames
.map(fileName => `${checksums[fileName]} *${fileName}`)
.join('\n');
await fs.writeFile(shasumPath, generatedChecksums);
}
else {
shasumPath = await _downloadArtifact({
isGeneric: true,
version: artifactDetails.version,
artifactName: 'SHASUMS256.txt',
force: artifactDetails.force,
downloadOptions: artifactDetails.downloadOptions,
cacheRoot: artifactDetails.cacheRoot,
downloader: artifactDetails.downloader,
mirrorOptions: artifactDetails.mirrorOptions,
});
}
// For versions 1.3.2 - 1.3.4, need to overwrite the `defaultTextEncoding` option:
// https://github.com/electron/electron/pull/6676#discussion_r75332120
if (semver.satisfies(artifactDetails.version, '1.3.2 - 1.3.4')) {
const validatorOptions = {};
validatorOptions.defaultTextEncoding = 'binary';
const checker = new sumchecker.ChecksumValidator('sha256', shasumPath, validatorOptions);
await checker.validate(path.dirname(downloadedAssetPath), path.basename(downloadedAssetPath));
}
else {
await sumchecker('sha256', shasumPath, path.dirname(downloadedAssetPath), [
path.basename(downloadedAssetPath),
]);
}
}
});
}
/**
* Downloads an artifact from an Electron release and returns an absolute path
* to the downloaded file.
*
* @param artifactDetails - The information required to download the artifact
*/
export async function downloadArtifact(_artifactDetails) {
const artifactDetails = Object.assign({}, _artifactDetails);
if (!_artifactDetails.isGeneric) {
const platformArtifactDetails = artifactDetails;
if (!platformArtifactDetails.platform) {
d('No platform found, defaulting to the host platform');
platformArtifactDetails.platform = process.platform;
}
if (platformArtifactDetails.arch) {
platformArtifactDetails.arch = getNodeArch(platformArtifactDetails.arch);
}
else {
d('No arch found, defaulting to the host arch');
platformArtifactDetails.arch = getHostArch();
}
}
ensureIsTruthyString(artifactDetails, 'version');
artifactDetails.version = getArtifactVersion(artifactDetails);
const fileName = getArtifactFileName(artifactDetails);
const url = await getArtifactRemoteURL(artifactDetails);
const cache = new Cache(artifactDetails.cacheRoot);
// Do not check if the file exists in the cache when force === true
if (!artifactDetails.force) {
d(`Checking the cache (${artifactDetails.cacheRoot}) for ${fileName} (${url})`);
const cachedPath = await cache.getPathForFileInCache(url, fileName);
if (cachedPath === null) {
d('Cache miss');
}
else {
d('Cache hit');
try {
await validateArtifact(artifactDetails, cachedPath, downloadArtifact);
return cachedPath;
}
catch (err) {
d("Artifact in cache didn't match checksums", err);
d('falling back to re-download');
}
}
}
if (!artifactDetails.isGeneric &&
isOfficialLinuxIA32Download(artifactDetails.platform, artifactDetails.arch, artifactDetails.version, artifactDetails.mirrorOptions)) {
console.warn('Official Linux/ia32 support is deprecated.');
console.warn('For more info: https://electronjs.org/blog/linux-32bit-support');
}
return await withTempDirectoryIn(artifactDetails.tempDirectory, async (tempFolder) => {
const tempDownloadPath = path.resolve(tempFolder, getArtifactFileName(artifactDetails));
const downloader = artifactDetails.downloader || (await getDownloaderForSystem());
d(`Downloading ${url} to ${tempDownloadPath} with options: ${JSON.stringify(artifactDetails.downloadOptions)}`);
await downloader.download(url, tempDownloadPath, artifactDetails.downloadOptions);
await validateArtifact(artifactDetails, tempDownloadPath, downloadArtifact);
return await cache.putFileInCache(url, tempDownloadPath, fileName);
});
}
/**
* Downloads a specific version of Electron and returns an absolute path to a
* ZIP file.
*
* @param version - The version of Electron you want to download
*/
export function download(version, options) {
return downloadArtifact(Object.assign(Object.assign({}, options), { version, platform: process.platform, arch: process.arch, artifactName: 'electron' }));
}
//# sourceMappingURL=index.js.map
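
Editor's note: validateArtifact above accepts a caller-supplied `checksums` map as an alternative to downloading SHASUMS256.txt. A hedged sketch follows; the digest is a placeholder and the electron-v<version>-<platform>-<arch>.zip file name is an assumption about the release asset naming, not taken from this diff.

import { downloadArtifact } from '@electron/get';

async function main(): Promise<void> {
  await downloadArtifact({
    version: '18.2.0',
    artifactName: 'electron',
    platform: 'linux',
    arch: 'x64',
    // Keys are artifact file names, values are SHA-256 hex digests; when this map is
    // present, a local SHASUMS256.txt is generated from it instead of being downloaded.
    checksums: {
      'electron-v18.2.0-linux-x64.zip': '<sha256-hex-digest>', // placeholder digest
    },
  });
}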

@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,KAAK,UAAU,MAAM,YAAY,CAAC;AAEzC,OAAO,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,kBAAkB,EAAE,MAAM,kBAAkB,CAAC;AAOjG,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,sBAAsB,EAAE,MAAM,uBAAuB,CAAC;AAC/D,OAAO,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAC1C,OAAO,EACL,mBAAmB,EACnB,WAAW,EACX,WAAW,EACX,oBAAoB,EACpB,2BAA2B,GAE5B,MAAM,SAAS,CAAC;AAEjB,OAAO,EAAE,WAAW,EAAE,MAAM,SAAS,CAAC;AACtC,OAAO,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAG1C,MAAM,CAAC,GAAG,KAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC,IAAI,OAAO,CAAC,GAAG,CAAC,sBAAsB,EAAE;IACtC,eAAe,EAAE,CAAC;CACnB;AAMD,KAAK,UAAU,gBAAgB,CAC7B,eAAwC,EACxC,mBAA2B,EAC3B,iBAAqC;IAErC,OAAO,MAAM,mBAAmB,CAAC,eAAe,CAAC,aAAa,EAAE,KAAK,EAAC,UAAU,EAAC,EAAE;QACjF,uDAAuD;QACvD,0DAA0D;QAC1D,IACE,CAAC,eAAe,CAAC,YAAY,CAAC,UAAU,CAAC,YAAY,CAAC;YACtD,CAAC,eAAe,CAAC,wBAAwB;YACzC,MAAM,CAAC,GAAG,CAAC,eAAe,CAAC,OAAO,EAAE,OAAO,CAAC,EAC5C;YACA,IAAI,UAAkB,CAAC;YACvB,MAAM,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;YAC5C,IAAI,SAAS,EAAE;gBACb,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBACxD,MAAM,SAAS,GAAa,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;gBACnD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;oBAC1B,MAAM,IAAI,KAAK,CACb,8EAA8E,CAC/E,CAAC;iBACH;gBACD,MAAM,kBAAkB,GAAG,SAAS;qBACjC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,KAAK,QAAQ,EAAE,CAAC;qBACtD,IAAI,CAAC,IAAI,CAAC,CAAC;gBACd,MAAM,EAAE,CAAC,SAAS,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;aACpD;iBAAM;gBACL,UAAU,GAAG,MAAM,iBAAiB,CAAC;oBACnC,SAAS,EAAE,IAAI;oBACf,OAAO,EAAE,eAAe,CAAC,OAAO;oBAChC,YAAY,EAAE,gBAAgB;oBAC9B,KAAK,EAAE,eAAe,CAAC,KAAK;oBAC5B,eAAe,EAAE,eAAe,CAAC,eAAe;oBAChD,SAAS,EAAE,eAAe,CAAC,SAAS;oBACpC,UAAU,EAAE,eAAe,CAAC,UAAU;oBACtC,aAAa,EAAE,eAAe,CAAC,aAAa;iBAC7C,CAAC,CAAC;aACJ;YAED,kFAAkF;YAClF,sEAAsE;YACtE,IAAI,MAAM,CAAC,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,eAAe,CAAC,EAAE;gBAC9D,MAAM,gBAAgB,GAA+B,EAAE,CAAC;gBACxD,gBAAgB,CAAC,mBAAmB,GAAG,QAAQ,CAAC;gBAChD,MAAM,OAAO,GAAG,IAAI,UAAU,CAAC,iBAAiB,CAAC,QAAQ,EAAE,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBACzF,MAAM,OAAO,CAAC,QAAQ,CACpB,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,EACjC,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CACnC,CAAC;aACH;iBAAM;gBACL,MAAM,UAAU,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;oBACxE,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC;iBACnC,CAAC,CAAC;aACJ;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACpC,gBAA6D;IAE7D,MAAM,eAAe,qBACf,gBAA4C,CACjD,CAAC;IACF,IAAI,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC/B,MAAM,uBAAuB,GAAG,eAAkD,CAAC;QACnF,IAAI,CAAC,uBAAuB,CAAC,QAAQ,EAAE;YACrC,CAAC,CAAC,oDAAoD,CAAC,CAAC;YACxD,uBAAuB,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;SACrD;QACD,IAAI,uBAAuB,CAAC,IAAI,EAAE;YAChC,uBAAuB,CAAC,IAAI,GAAG,WAAW,CAAC,uBAAuB,CAAC,IAAI,CAAC,CAAC;SAC1E;aAAM;YACL,CAAC,CAAC,4CAA4C,CAAC,CAAC;YAChD,uBAAuB,CAAC,IAAI,GAAG,WAAW,EAAE,CAAC;SAC9C;KACF;IACD,oBAAoB,CAAC,eAAe,EAAE,SAAS,CAAC,CAAC;IAEjD,eAAe,CAAC,OAAO,GAAG,kBAAkB,CAAC,eAAe,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,mBAAmB,CAAC,eAAe,CAAC,CAAC;IACtD,MAAM,GAAG,GAAG,MAAM,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACxD,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,eAAe,CAAC,SAAS,CAAC,CAAC;IAEnD,mEAAmE;IACnE,IAAI,CAAC,eAAe,CAAC,KAAK,EAAE;QAC1B,CAAC,CAAC,uBAAuB,eAAe,CAAC,SAAS,SAAS,QAAQ,KAAK,GAAG,GAAG,CAAC,CAAC;QAChF,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,qBAAqB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QAEpE,IAAI,UAAU,KAAK,IAAI,EAAE;YACvB,CAAC,CAAC,YAAY,CAAC,CAAC;SACjB;aAAM;YACL,CAAC,CAAC,WAAW,CAAC,CAAC;YACf,IAAI;gBACF,MAAM,gBAAgB,CAAC,eAAe,EAAE,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAEtE,OAAO,UAAU,CAAC
;aACnB;YAAC,OAAO,GAAG,EAAE;gBACZ,CAAC,CAAC,0CAA0C,EAAE,GAAG,CAAC,CAAC;gBACnD,CAAC,CAAC,6BAA6B,CAAC,CAAC;aAClC;SACF;KACF;IAED,IACE,CAAC,eAAe,CAAC,SAAS;QAC1B,2BAA2B,CACzB,eAAe,CAAC,QAAQ,EACxB,eAAe,CAAC,IAAI,EACpB,eAAe,CAAC,OAAO,EACvB,eAAe,CAAC,aAAa,CAC9B,EACD;QACA,OAAO,CAAC,IAAI,CAAC,4CAA4C,CAAC,CAAC;QAC3D,OAAO,CAAC,IAAI,CAAC,gEAAgE,CAAC,CAAC;KAChF;IAED,OAAO,MAAM,mBAAmB,CAAC,eAAe,CAAC,aAAa,EAAE,KAAK,EAAC,UAAU,EAAC,EAAE;QACjF,MAAM,gBAAgB,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,mBAAmB,CAAC,eAAe,CAAC,CAAC,CAAC;QAExF,MAAM,UAAU,GAAG,eAAe,CAAC,UAAU,IAAI,CAAC,MAAM,sBAAsB,EAAE,CAAC,CAAC;QAClF,CAAC,CACC,eAAe,GAAG,OAAO,gBAAgB,kBAAkB,IAAI,CAAC,SAAS,CACvE,eAAe,CAAC,eAAe,CAChC,EAAE,CACJ,CAAC;QACF,MAAM,UAAU,CAAC,QAAQ,CAAC,GAAG,EAAE,gBAAgB,EAAE,eAAe,CAAC,eAAe,CAAC,CAAC;QAElF,MAAM,gBAAgB,CAAC,eAAe,EAAE,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;QAE5E,OAAO,MAAM,KAAK,CAAC,cAAc,CAAC,GAAG,EAAE,gBAAgB,EAAE,QAAQ,CAAC,CAAC;IACrE,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,QAAQ,CACtB,OAAe,EACf,OAAwC;IAExC,OAAO,gBAAgB,iCAClB,OAAO,KACV,OAAO,EACP,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,IAAI,EAAE,OAAO,CAAC,IAAI,EAClB,YAAY,EAAE,UAAU,IACxB,CAAC;AACL,CAAC"}

@ -1,4 +0,0 @@
/**
* Initializes a third-party proxy module for HTTP(S) requests.
*/
export declare function initializeProxy(): void;

@ -1,24 +0,0 @@
import * as debug from 'debug';
import { getEnv, setEnv } from './utils';
const d = debug('@electron/get:proxy');
/**
* Initializes a third-party proxy module for HTTP(S) requests.
*/
export function initializeProxy() {
try {
// See: https://github.com/electron/get/pull/214#discussion_r798845713
const env = getEnv('GLOBAL_AGENT_');
setEnv('GLOBAL_AGENT_HTTP_PROXY', env('HTTP_PROXY'));
setEnv('GLOBAL_AGENT_HTTPS_PROXY', env('HTTPS_PROXY'));
setEnv('GLOBAL_AGENT_NO_PROXY', env('NO_PROXY'));
/**
* TODO: replace global-agent with a hpagent. @BlackHole1
* https://github.com/sindresorhus/got/blob/HEAD/documentation/tips.md#proxying
*/
require('global-agent').bootstrap();
}
catch (e) {
d('Could not load either proxy modules, built-in proxy support not available:', e);
}
}
//# sourceMappingURL=proxy.js.map
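
Editor's note: a sketch of opting into this proxy support, assuming the conventional HTTPS_PROXY variable; the proxy URL and version are placeholders.

import { initializeProxy, download } from '@electron/get';

async function main(): Promise<void> {
  // Either set ELECTRON_GET_USE_PROXY before the package is imported (the module-level
  // check above then calls initializeProxy() automatically), or call it explicitly:
  process.env.HTTPS_PROXY = 'http://127.0.0.1:8888'; // placeholder proxy address
  initializeProxy(); // copies HTTP(S)_PROXY / NO_PROXY into GLOBAL_AGENT_* and bootstraps global-agent

  const zipPath = await download('18.2.0'); // illustrative version
  console.log(zipPath);
}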

@ -1 +0,0 @@
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../../src/proxy.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AAEzC,MAAM,CAAC,GAAG,KAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC;;GAEG;AACH,MAAM,UAAU,eAAe;IAC7B,IAAI;QACF,sEAAsE;QACtE,MAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,CAAC;QAEpC,MAAM,CAAC,yBAAyB,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;QACrD,MAAM,CAAC,0BAA0B,EAAE,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;QACvD,MAAM,CAAC,uBAAuB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC;QAEjD;;;WAGG;QACH,OAAO,CAAC,cAAc,CAAC,CAAC,SAAS,EAAE,CAAC;KACrC;IAAC,OAAO,CAAC,EAAE;QACV,CAAC,CAAC,4EAA4E,EAAE,CAAC,CAAC,CAAC;KACpF;AACH,CAAC"}

Some files were not shown because too many files have changed in this diff.
