Compare commits
65 Commits
| SHA1 |
|---|
| bb304ce10b |
| 8e2842b4a3 |
| 02119d5a83 |
| 1948f1caa9 |
| 52fa9a2a78 |
| 4e631b7ca2 |
| fe1a0e06b0 |
| 9a808beecc |
| 381ca1a748 |
| 0d2b974dcc |
| 0cfaddcc77 |
| fddff57c15 |
| b02ce7697f |
| 85b1f90f2c |
| 3ecbbd5710 |
| 8bff0b7579 |
| 283773eb45 |
| a99ae94fc2 |
| 0d70c1c39f |
| ba587f0a08 |
| 580002844b |
| 365775311f |
| 1fc0005ae8 |
| 64cbc83faf |
| 691e4ef31d |
| d902197267 |
| 72a3f83d27 |
| 3cc1365561 |
| 3a30fe989b |
| 01dfb8a963 |
| 3bd173d61a |
| 479e80d6a9 |
| 2c3c1a60a0 |
| cdcbe58dee |
| 38ab1c3559 |
| 9f86e1dd2b |
| a626463c95 |
| 36b58a84f3 |
| fe8d99e31d |
| d336231787 |
| 6cd85d89aa |
| 7b1c610963 |
| 060a299188 |
| 64420e4c32 |
| 2b411ce98b |
| e80eed7f69 |
| 6053796432 |
| 0a1d0f916c |
| 1d4e198ba5 |
| 2f06fcab5f |
| e7d57dd1e5 |
| d9b1b4104f |
| b445fa1974 |
| f265d1f5e2 |
| a05a5e6db8 |
| 8c55e5b4f3 |
| 477a52e779 |
| 107823fc2c |
| 621083fe1f |
| 0770238718 |
| c01deb5576 |
| 82acb6fc72 |
| 8bb61593b2 |
| 3ebf924651 |
| 46a3b7e94d |
@@ -1,46 +1,37 @@
step-restore-cache: &step-restore-cache
  restore_cache:
    keys:
      - v1-dependencies-{{ arch }}-{{ checksum "yarn.lock" }}
      - v1-dependencies-{{ arch }}

steps-test: &steps-test
  steps:
    - checkout
    - *step-restore-cache
    - run: yarn --frozen-lockfile
    - save_cache:
        paths:
          - node_modules
        key: v1-dependencies-{{ arch }}-{{ checksum "yarn.lock" }}
    - run: yarn build
    - run: yarn test

version: 2.1
jobs:
  test:
    macos:
      xcode: "12.2.0"
    <<: *steps-test

  release:
    docker:
      - image: circleci/node:14.15
    steps:
      - checkout
      - *step-restore-cache
      - run: yarn --frozen-lockfile
      - run: npx semantic-release
orbs:
  cfa: continuousauth/npm@1.0.2
  node: electronjs/node@1.4.1

workflows:
  version: 2
  test_and_release:
    # Run the test jobs first, then the release only when all the test jobs are successful
    jobs:
      - test
      - release:
      - node/test:
          executor: node/macos
          name: test-mac-<< matrix.node-version >>
          override-ci-command: yarn install --frozen-lockfile --ignore-engines
          test-steps:
            - run: yarn build
            - run: yarn lint
            - run: yarn test
          use-test-steps: true
          matrix:
            alias: test
            parameters:
              node-version:
                - 20.5.0
                - 18.17.0
                - 16.20.1
                - 14.21.3
                - 12.22.12
                - 10.24.1
      - cfa/release:
          requires:
            - test
          filters:
            branches:
              only:
                - master

                - main
          context: cfa-release
.github/CODEOWNERS (vendored, new file, 1 line)
@@ -0,0 +1 @@
* @electron/wg-ecosystem
.github/workflows/add-to-project.yml (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
name: Add to Ecosystem WG Project

on:
  issues:
    types:
      - opened
  pull_request_target:
    types:
      - opened

permissions: {}

jobs:
  add-to-project:
    runs-on: ubuntu-latest
    steps:
      - name: Generate GitHub App token
        uses: electron/github-app-auth-action@384fd19694fe7b6dcc9a684746c6976ad78228ae # v1.1.1
        id: generate-token
        with:
          creds: ${{ secrets.ECOSYSTEM_ISSUE_TRIAGE_GH_APP_CREDS }}
          org: electron
      - name: Add to Project
        uses: dsanders11/project-actions/add-item@3a81985616963f32fae17d1d1b406c631f3201a1 # v1.1.0
        with:
          field: Opened
          field-value: ${{ github.event.pull_request.created_at || github.event.issue.created_at }}
          project-number: 89
          token: ${{ steps.generate-token.outputs.token }}
.github/workflows/semantic.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: "Check Semantic Commit"

on:
  pull_request:
    types:
      - opened
      - edited
      - synchronize

permissions:
  contents: read

jobs:
  main:
    permissions:
      pull-requests: read # for amannn/action-semantic-pull-request to analyze PRs
      statuses: write # for amannn/action-semantic-pull-request to mark status of analyzed PR
    name: Validate PR Title
    runs-on: ubuntu-latest
    steps:
      - name: semantic-pull-request
        uses: amannn/action-semantic-pull-request@c3cd5d1ea3580753008872425915e343e351ab54 # v5.2.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          validateSingleCommit: false
.gitignore (vendored, 4 changed lines)
@@ -1,3 +1,5 @@
node_modules
dist
*.app
entry-asar/*.js*
entry-asar/*.ts
*.app
.husky/pre-commit (new executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

yarn lint-staged
@@ -4,6 +4,7 @@
    "@semantic-release/release-notes-generator",
    "@continuous-auth/semantic-release-npm",
    "@semantic-release/github"
  ]
  ],
  "branches": [ "main" ]
}
LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) Contributors to the Electron project

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -2,7 +2,8 @@

> Create universal macOS Electron applications

[](https://circleci.com/gh/electron/universal)
[](https://circleci.com/gh/electron/universal)
[](https://npm.im/@electron/universal)

## Usage

@@ -30,3 +31,7 @@ because it contains two apps in one.

The way `@electron/universal` works today means you don't need to worry about
things like building universal versions of your native modules. As long as
your x64 and arm64 apps work in isolation the Universal app will work as well.

#### How do I build my app for Apple silicon in the first place?

Check out the [Electron Apple silicon blog post](https://www.electronjs.org/blog/apple-silicon)
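For orientation (not part of this diff), here is a minimal usage sketch of the API these commits ship, based on the `MakeUniversalOpts` fields shown in the `src/index.ts` diff further down; all paths are hypothetical and must be absolute, and the tool only runs on macOS:

```ts
import { makeUniversalApp } from '@electron/universal';

// Hypothetical build output locations; substitute your own .app bundles.
await makeUniversalApp({
  x64AppPath: '/builds/x64/MyApp.app',
  arm64AppPath: '/builds/arm64/MyApp.app',
  outAppPath: '/builds/universal/MyApp.app',
  force: false,       // do not overwrite an existing app at outAppPath
  mergeASARs: true,   // merge the x64 and arm64 app.asar files into one
});
```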
entry-asar/ambient.d.ts (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
declare namespace NodeJS {
  interface Process extends EventEmitter {
    // This is an undocumented private API. It exists.
    _archPath: string;
  }
}

declare module 'electron' {
  const app: Electron.App;

  namespace Electron {
    interface App {
      getAppPath: () => string;
      setAppPath: (p: string) => void;
    }
  }

  export { app };
}
entry-asar/has-asar.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import { app } from 'electron';
import path from 'path';

if (process.arch === 'arm64') {
  setPaths('arm64');
} else {
  setPaths('x64');
}

function setPaths(platform: string) {
  // This should return the full path, ending in something like
  // Notion.app/Contents/Resources/app.asar
  const appPath = app.getAppPath();
  const asarFile = `app-${platform}.asar`;

  // Maybe we'll handle this in Electron one day
  if (path.basename(appPath) === 'app.asar') {
    const platformAppPath = path.join(path.dirname(appPath), asarFile);

    // This is an undocumented private API. It exists.
    app.setAppPath(platformAppPath);
  }

  process._archPath = require.resolve(`../${asarFile}`);
}

require(process._archPath);
@@ -1,7 +0,0 @@
if (process.arch === 'arm64') {
  process._asarPath = require.resolve('../arm64.app.asar');
} else {
  process._asarPath = require.resolve('../x64.app.asar');
}

require(process._asarPath);
entry-asar/no-asar.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import { app } from 'electron';
import path from 'path';

if (process.arch === 'arm64') {
  setPaths('arm64');
} else {
  setPaths('x64');
}

function setPaths(platform: string) {
  // This should return the full path, ending in something like
  // Notion.app/Contents/Resources/app
  const appPath = app.getAppPath();
  const appFolder = `app-${platform}`;

  // Maybe we'll handle this in Electron one day
  if (path.basename(appPath) === 'app') {
    const platformAppPath = path.join(path.dirname(appPath), appFolder);

    // This is an undocumented private API. It exists.
    app.setAppPath(platformAppPath);
  }

  process._archPath = require.resolve(`../${appFolder}`);
}

require(process._archPath);
package.json (35 changed lines)
@@ -10,33 +10,52 @@
    "apple silicon",
    "universal"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/electron/universal.git"
  },
  "engines": {
    "node": ">=8.6"
  },
  "files": [
    "dist/*",
    "entry-asar/*",
    "!entry-asar/**/*.ts",
    "README.md"
  ],
  "author": "Samuel Attard",
  "scripts": {
    "build": "tsc && tsc -p tsconfig.esm.json",
    "lint": "prettier --check \"src/**/*.ts\"",
    "build": "tsc -p tsconfig.cjs.json && tsc -p tsconfig.esm.json && tsc -p tsconfig.entry-asar.json",
    "lint": "prettier --check \"{src,entry-asar}/**/*.ts\"",
    "prettier:write": "prettier --write \"{src,entry-asar}/**/*.ts\"",
    "prepublishOnly": "npm run build",
    "test": "exit 0"
    "test": "exit 0",
    "prepare": "husky install"
  },
  "devDependencies": {
    "@continuous-auth/semantic-release-npm": "^2.0.0",
    "@continuous-auth/semantic-release-npm": "^3.0.0",
    "@types/debug": "^4.1.5",
    "@types/fs-extra": "^9.0.4",
    "@types/minimatch": "^3.0.5",
    "@types/node": "^14.14.7",
    "husky": "^4.3.0",
    "@types/plist": "^3.0.2",
    "husky": "^8.0.0",
    "lint-staged": "^10.5.1",
    "prettier": "^2.1.2",
    "semantic-release": "^17.2.2",
    "typescript": "^4.0.5"
  },
  "dependencies": {
    "@electron/asar": "^3.2.1",
    "@malept/cross-spawn-promise": "^1.1.0",
    "asar": "^3.0.3",
    "fs-extra": "^9.0.1"
    "debug": "^4.3.1",
    "dir-compare": "^3.0.0",
    "fs-extra": "^9.0.1",
    "minimatch": "^3.0.4",
    "plist": "^3.0.4"
  },
  "lint-staged": {
    "*.ts": [
      "prettier --write"
    ]
  }
}
src/asar-utils.ts (new file, 226 lines)
@@ -0,0 +1,226 @@
import asar from '@electron/asar';
import { execFileSync } from 'child_process';
import crypto from 'crypto';
import fs from 'fs-extra';
import path from 'path';
import minimatch from 'minimatch';
import os from 'os';
import { d } from './debug';

const LIPO = 'lipo';

export enum AsarMode {
  NO_ASAR,
  HAS_ASAR,
}

export type MergeASARsOptions = {
  x64AsarPath: string;
  arm64AsarPath: string;
  outputAsarPath: string;

  singleArchFiles?: string;
};

// See: https://github.com/apple-opensource-mirror/llvmCore/blob/0c60489d96c87140db9a6a14c6e82b15f5e5d252/include/llvm/Object/MachOFormat.h#L108-L112
const MACHO_MAGIC = new Set([
  // 32-bit Mach-O
  0xfeedface,
  0xcefaedfe,

  // 64-bit Mach-O
  0xfeedfacf,
  0xcffaedfe,
]);

const MACHO_UNIVERSAL_MAGIC = new Set([
  // universal
  0xcafebabe,
  0xbebafeca,
]);

export const detectAsarMode = async (appPath: string) => {
  d('checking asar mode of', appPath);
  const asarPath = path.resolve(appPath, 'Contents', 'Resources', 'app.asar');

  if (!(await fs.pathExists(asarPath))) {
    d('determined no asar');
    return AsarMode.NO_ASAR;
  }

  d('determined has asar');
  return AsarMode.HAS_ASAR;
};

export const generateAsarIntegrity = (asarPath: string) => {
  return {
    algorithm: 'SHA256' as const,
    hash: crypto
      .createHash('SHA256')
      .update(asar.getRawHeader(asarPath).headerString)
      .digest('hex'),
  };
};

function toRelativePath(file: string): string {
  return file.replace(/^\//, '');
}

function isDirectory(a: string, file: string): boolean {
  return Boolean('files' in asar.statFile(a, file));
}

function checkSingleArch(archive: string, file: string, allowList?: string): void {
  if (allowList === undefined || !minimatch(file, allowList, { matchBase: true })) {
    throw new Error(
      `Detected unique file "${file}" in "${archive}" not covered by ` +
        `allowList rule: "${allowList}"`,
    );
  }
}

export const mergeASARs = async ({
  x64AsarPath,
  arm64AsarPath,
  outputAsarPath,
  singleArchFiles,
}: MergeASARsOptions): Promise<void> => {
  d(`merging ${x64AsarPath} and ${arm64AsarPath}`);

  const x64Files = new Set(asar.listPackage(x64AsarPath).map(toRelativePath));
  const arm64Files = new Set(asar.listPackage(arm64AsarPath).map(toRelativePath));

  //
  // Build set of unpacked directories and files
  //

  const unpackedFiles = new Set<string>();

  function buildUnpacked(a: string, fileList: Set<string>): void {
    for (const file of fileList) {
      const stat = asar.statFile(a, file);

      if (!('unpacked' in stat) || !stat.unpacked) {
        continue;
      }

      if ('files' in stat) {
        continue;
      }
      unpackedFiles.add(file);
    }
  }

  buildUnpacked(x64AsarPath, x64Files);
  buildUnpacked(arm64AsarPath, arm64Files);

  //
  // Build list of files/directories unique to each asar
  //

  for (const file of x64Files) {
    if (!arm64Files.has(file)) {
      checkSingleArch(x64AsarPath, file, singleArchFiles);
    }
  }
  const arm64Unique = [];
  for (const file of arm64Files) {
    if (!x64Files.has(file)) {
      checkSingleArch(arm64AsarPath, file, singleArchFiles);
      arm64Unique.push(file);
    }
  }

  //
  // Find common bindings with different content
  //

  const commonBindings = [];
  for (const file of x64Files) {
    if (!arm64Files.has(file)) {
      continue;
    }

    // Skip directories
    if (isDirectory(x64AsarPath, file)) {
      continue;
    }

    const x64Content = asar.extractFile(x64AsarPath, file);
    const arm64Content = asar.extractFile(arm64AsarPath, file);

    if (x64Content.compare(arm64Content) === 0) {
      continue;
    }

    if (
      MACHO_UNIVERSAL_MAGIC.has(x64Content.readUInt32LE(0)) &&
      MACHO_UNIVERSAL_MAGIC.has(arm64Content.readUInt32LE(0))
    ) {
      continue;
    }

    if (!MACHO_MAGIC.has(x64Content.readUInt32LE(0))) {
      throw new Error(`Can't reconcile two non-macho files ${file}`);
    }

    commonBindings.push(file);
  }

  //
  // Extract both
  //

  const x64Dir = await fs.mkdtemp(path.join(os.tmpdir(), 'x64-'));
  const arm64Dir = await fs.mkdtemp(path.join(os.tmpdir(), 'arm64-'));

  try {
    d(`extracting ${x64AsarPath} to ${x64Dir}`);
    asar.extractAll(x64AsarPath, x64Dir);

    d(`extracting ${arm64AsarPath} to ${arm64Dir}`);
    asar.extractAll(arm64AsarPath, arm64Dir);

    for (const file of arm64Unique) {
      const source = path.resolve(arm64Dir, file);
      const destination = path.resolve(x64Dir, file);

      if (isDirectory(arm64AsarPath, file)) {
        d(`creating unique directory: ${file}`);
        await fs.mkdirp(destination);
        continue;
      }

      d(`copying unique file: ${file}`);
      await fs.mkdirp(path.dirname(destination));
      await fs.copy(source, destination);
    }

    for (const binding of commonBindings) {
      const source = await fs.realpath(path.resolve(arm64Dir, binding));
      const destination = await fs.realpath(path.resolve(x64Dir, binding));

      d(`merging binding: ${binding}`);
      execFileSync(LIPO, [source, destination, '-create', '-output', destination]);
    }

    d(`creating archive at ${outputAsarPath}`);

    const resolvedUnpack = Array.from(unpackedFiles).map((file) => path.join(x64Dir, file));

    let unpack: string | undefined;
    if (resolvedUnpack.length > 1) {
      unpack = `{${resolvedUnpack.join(',')}}`;
    } else if (resolvedUnpack.length === 1) {
      unpack = resolvedUnpack[0];
    }

    await asar.createPackageWithOptions(x64Dir, outputAsarPath, {
      unpack,
    });

    d('done merging');
  } finally {
    await Promise.all([fs.remove(x64Dir), fs.remove(arm64Dir)]);
  }
};
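As an aside (not part of this diff), a small self-contained sketch of the magic-number check `mergeASARs` applies to extracted file contents; the helper name and path are hypothetical:

```ts
import fs from 'fs-extra';

// Same magic values as MACHO_MAGIC / MACHO_UNIVERSAL_MAGIC above.
const MACHO_MAGIC = new Set([0xfeedface, 0xcefaedfe, 0xfeedfacf, 0xcffaedfe]);
const MACHO_UNIVERSAL_MAGIC = new Set([0xcafebabe, 0xbebafeca]);

// Hypothetical helper: read a file and classify it by its first four bytes,
// the same test mergeASARs uses to decide whether two differing files can be lipo'd.
async function classifyBinary(filePath: string): Promise<'macho' | 'universal' | 'other'> {
  const buffer = await fs.readFile(filePath);
  if (buffer.length < 4) return 'other';
  const magic = buffer.readUInt32LE(0);
  if (MACHO_UNIVERSAL_MAGIC.has(magic)) return 'universal';
  if (MACHO_MAGIC.has(magic)) return 'macho';
  return 'other';
}
```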
src/debug.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
import debug from 'debug';

export const d = debug('electron-universal');
src/file-utils.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
import { spawn, ExitCodeError } from '@malept/cross-spawn-promise';
import * as fs from 'fs-extra';
import * as path from 'path';

const MACHO_PREFIX = 'Mach-O ';

export enum AppFileType {
  MACHO,
  PLAIN,
  INFO_PLIST,
  SNAPSHOT,
  APP_CODE,
}

export type AppFile = {
  relativePath: string;
  type: AppFileType;
};

/**
 *
 * @param appPath Path to the application
 */
export const getAllAppFiles = async (appPath: string): Promise<AppFile[]> => {
  const files: AppFile[] = [];

  const visited = new Set<string>();
  const traverse = async (p: string) => {
    p = await fs.realpath(p);
    if (visited.has(p)) return;
    visited.add(p);

    const info = await fs.stat(p);
    if (info.isSymbolicLink()) return;
    if (info.isFile()) {
      let fileType = AppFileType.PLAIN;

      var fileOutput = '';
      try {
        fileOutput = await spawn('file', ['--brief', '--no-pad', p]);
      } catch (e) {
        if (e instanceof ExitCodeError) {
          /* silently accept error codes from "file" */
        } else {
          throw e;
        }
      }
      if (p.includes('app.asar')) {
        fileType = AppFileType.APP_CODE;
      } else if (fileOutput.startsWith(MACHO_PREFIX)) {
        fileType = AppFileType.MACHO;
      } else if (p.endsWith('.bin')) {
        fileType = AppFileType.SNAPSHOT;
      } else if (path.basename(p) === 'Info.plist') {
        fileType = AppFileType.INFO_PLIST;
      }

      files.push({
        relativePath: path.relative(appPath, p),
        type: fileType,
      });
    }

    if (info.isDirectory()) {
      for (const child of await fs.readdir(p)) {
        await traverse(path.resolve(p, child));
      }
    }
  };
  await traverse(appPath);

  return files;
};
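For illustration (not part of this diff), this is roughly how a caller might consume `getAllAppFiles`, mirroring the filtering pattern used in the `src/index.ts` diff below; the app path is hypothetical:

```ts
import { getAllAppFiles, AppFileType } from './file-utils';

// Hypothetical path to an unpacked .app bundle.
const appPath = '/builds/x64/MyApp.app';

const files = await getAllAppFiles(appPath);
const machOFiles = files.filter((f) => f.type === AppFileType.MACHO);
const plists = files.filter((f) => f.type === AppFileType.INFO_PLIST);
console.log(`${machOFiles.length} Mach-O binaries, ${plists.length} Info.plist files`);
```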
src/index.ts (356 changed lines)
@@ -1,13 +1,19 @@
import { spawn } from '@malept/cross-spawn-promise';
import * as asar from 'asar';
import * as asar from '@electron/asar';
import * as crypto from 'crypto';
import * as fs from 'fs-extra';
import minimatch from 'minimatch';
import * as os from 'os';
import * as path from 'path';
import * as plist from 'plist';
import * as dircompare from 'dir-compare';

const MACHO_PREFIX = 'Mach-O ';
import { AppFile, AppFileType, getAllAppFiles } from './file-utils';
import { AsarMode, detectAsarMode, generateAsarIntegrity, mergeASARs } from './asar-utils';
import { sha } from './sha';
import { d } from './debug';

type MakeUniversalOpts = {
export type MakeUniversalOpts = {
  /**
   * Absolute file system path to the x64 version of your application. E.g. /Foo/bar/MyApp_x64.app
   */
@@ -26,80 +32,30 @@ type MakeUniversalOpts = {
   * Forcefully overwrite any existing files that are in the way of generating the universal application
   */
  force: boolean;
  /**
   * Merge x64 and arm64 ASARs into one.
   */
  mergeASARs?: boolean;
  /**
   * Minimatch pattern of paths that are allowed to be present in one of the ASAR files, but not in the other.
   */
  singleArchFiles?: string;
  /**
   * Minimatch pattern of binaries that are expected to be the same x64 binary in both of the ASAR files.
   */
  x64ArchFiles?: string;
  /**
   * Minimatch pattern of paths that should not receive an injected ElectronAsarIntegrity value
   */
  infoPlistsToIgnore?: string;
};

enum AsarMode {
  NO_ASAR,
  HAS_ASAR,
}

export const detectAsarMode = async (appPath: string) => {
  const asarPath = path.resolve(appPath, 'Contents', 'Resources', 'app.asar');

  if (!(await fs.pathExists(asarPath))) return AsarMode.NO_ASAR;

  return AsarMode.HAS_ASAR;
};

enum AppFileType {
  MACHO,
  PLAIN,
  SNAPSHOT,
  APP_CODE,
}

type AppFile = {
  relativePath: string;
  type: AppFileType;
}

const getAllFiles = async (appPath: string): Promise<AppFile[]> => {
  const files: AppFile[] = [];

  const visited = new Set<string>();
  const traverse = async (p: string) => {
    p = await fs.realpath(p);
    if (visited.has(p)) return;
    visited.add(p);

    const info = await fs.stat(p);
    if (info.isSymbolicLink()) return;
    if (info.isFile()) {
      let fileType = AppFileType.PLAIN;

      const fileOutput = await spawn('file', ['--brief', '--no-pad', p]);
      if (p.includes('app.asar')) {
        fileType = AppFileType.APP_CODE;
      } else if (fileOutput.startsWith(MACHO_PREFIX)) {
        fileType = AppFileType.MACHO;
      } else if (p.endsWith('.bin')) {
        fileType = AppFileType.SNAPSHOT;
      }

      files.push({
        relativePath: path.relative(appPath, p),
        type: fileType,
      });
    }

    if (info.isDirectory()) {
      for (const child of await fs.readdir(p)) {
        await traverse(path.resolve(p, child));
      }
    }
  };
  await traverse(appPath);

  return files;
};

const dupedFiles = (files: AppFile[]) => files.filter(f => f.type !== AppFileType.SNAPSHOT && f.type !== AppFileType.APP_CODE);

const sha = async (filePath: string) => {
  return crypto.createHash('sha256').update(fs.readFileSync(filePath)).digest('hex');
}
const dupedFiles = (files: AppFile[]) =>
  files.filter((f) => f.type !== AppFileType.SNAPSHOT && f.type !== AppFileType.APP_CODE);

export const makeUniversalApp = async (opts: MakeUniversalOpts): Promise<void> => {
  d('making a universal app with options', opts);

  if (process.platform !== 'darwin')
    throw new Error('@electron/universal is only supported on darwin platforms');
  if (!opts.x64AppPath || !path.isAbsolute(opts.x64AppPath))
@@ -110,17 +66,21 @@ export const makeUniversalApp = async (opts: MakeUniversalOpts): Promise<void> =
    throw new Error('Expected opts.outAppPath to be an absolute path but it was not');

  if (await fs.pathExists(opts.outAppPath)) {
    d('output path exists already');
    if (!opts.force) {
      throw new Error(
        `The out path "${opts.outAppPath}" already exists and force is not set to true`,
      );
    } else {
      d('overwriting existing application because force == true');
      await fs.remove(opts.outAppPath);
    }
  }

  const x64AsarMode = await detectAsarMode(opts.x64AppPath);
  const arm64AsarMode = await detectAsarMode(opts.arm64AppPath);
  d('detected x64AsarMode =', x64AsarMode);
  d('detected arm64AsarMode =', arm64AsarMode);

  if (x64AsarMode !== arm64AsarMode)
    throw new Error(
@@ -128,23 +88,28 @@ export const makeUniversalApp = async (opts: MakeUniversalOpts): Promise<void> =
    );

  const tmpDir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'electron-universal-'));
  d('building universal app in', tmpDir);

  try {
    d('copying x64 app as starter template');
    const tmpApp = path.resolve(tmpDir, 'Tmp.app');
    await spawn('cp', ['-R', opts.x64AppPath, tmpApp]);

    const uniqueToX64: string[] = [];
    const uniqueToArm64: string[] = [];
    const x64Files = await getAllFiles(await fs.realpath(tmpApp));
    const arm64Files = await getAllFiles(opts.arm64AppPath);
    const x64Files = await getAllAppFiles(await fs.realpath(tmpApp));
    const arm64Files = await getAllAppFiles(await fs.realpath(opts.arm64AppPath));

    for (const file of dupedFiles(x64Files)) {
      if (!arm64Files.some(f => f.relativePath === file.relativePath)) uniqueToX64.push(file.relativePath);
      if (!arm64Files.some((f) => f.relativePath === file.relativePath))
        uniqueToX64.push(file.relativePath);
    }
    for (const file of dupedFiles(arm64Files)) {
      if (!x64Files.some(f => f.relativePath === file.relativePath)) uniqueToArm64.push(file.relativePath);
      if (!x64Files.some((f) => f.relativePath === file.relativePath))
        uniqueToArm64.push(file.relativePath);
    }
    if (uniqueToX64.length !== 0 || uniqueToArm64.length !== 0) {
      d('some files were not in both builds, aborting');
      console.error({
        uniqueToX64,
        uniqueToArm64,
@@ -154,59 +119,232 @@ export const makeUniversalApp = async (opts: MakeUniversalOpts): Promise<void> =
      );
    }

    for (const file of x64Files.filter(f => f.type === AppFileType.PLAIN)) {
    for (const file of x64Files.filter((f) => f.type === AppFileType.PLAIN)) {
      const x64Sha = await sha(path.resolve(opts.x64AppPath, file.relativePath));
      const arm64Sha = await sha(path.resolve(opts.arm64AppPath, file.relativePath));
      if (x64Sha !== arm64Sha) {
        console.error(`${x64Sha} !== ${arm64Sha}`);
        throw new Error(`Expected all non-binary files to have identical SHAs when creating a universal build but "${file.relativePath}" did not`);
        d('SHA for file', file.relativePath, `does not match across builds ${x64Sha}!=${arm64Sha}`);
        // The MainMenu.nib files generated by Xcode13 are deterministic in effect but not deterministic in generated sequence
        if (path.basename(path.dirname(file.relativePath)) === 'MainMenu.nib') {
          // The mismatch here is OK so we just move on to the next one
          continue;
        }
        throw new Error(
          `Expected all non-binary files to have identical SHAs when creating a universal build but "${file.relativePath}" did not`,
        );
      }
    }

    for (const machOFile of x64Files.filter(f => f.type === AppFileType.MACHO)) {
    for (const machOFile of x64Files.filter((f) => f.type === AppFileType.MACHO)) {
      const first = await fs.realpath(path.resolve(tmpApp, machOFile.relativePath));
      const second = await fs.realpath(path.resolve(opts.arm64AppPath, machOFile.relativePath));

      const x64Sha = await sha(path.resolve(opts.x64AppPath, machOFile.relativePath));
      const arm64Sha = await sha(path.resolve(opts.arm64AppPath, machOFile.relativePath));
      if (x64Sha === arm64Sha) {
        if (
          opts.x64ArchFiles === undefined ||
          !minimatch(machOFile.relativePath, opts.x64ArchFiles, { matchBase: true })
        ) {
          throw new Error(
            `Detected file "${machOFile.relativePath}" that's the same in both x64 and arm64 builds and not covered by the ` +
              `x64ArchFiles rule: "${opts.x64ArchFiles}"`,
          );
        }

        d(
          'SHA for Mach-O file',
          machOFile.relativePath,
          `matches across builds ${x64Sha}===${arm64Sha}, skipping lipo`,
        );
        continue;
      }

      d('joining two MachO files with lipo', {
        first,
        second,
      });
      await spawn('lipo', [
        await fs.realpath(path.resolve(tmpApp, machOFile.relativePath)),
        await fs.realpath(path.resolve(opts.arm64AppPath, machOFile.relativePath)),
        first,
        second,
        '-create',
        '-output',
        await fs.realpath(path.resolve(tmpApp, machOFile.relativePath)),
      ]);
    }

    /**
     * If we don't have an ASAR we need to check if the two "app" folders are identical, if
     * they are then we can just leave one there and call it a day. If the app folders for x64
     * and arm64 are different though we need to rename each folder and create a new fake "app"
     * entrypoint to dynamically load the correct app folder
     */
    if (x64AsarMode === AsarMode.NO_ASAR) {
      await fs.move(path.resolve(tmpApp, 'Contents', 'Resources', 'app'), path.resolve(tmpApp, 'Contents', 'Resources', 'x64.app'));
      await fs.copy(path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app'), path.resolve(tmpApp, 'Contents', 'Resources', 'arm64.app'));
    } else {
      await fs.move(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'), path.resolve(tmpApp, 'Contents', 'Resources', 'x64.app.asar'));
      const x64Unpacked = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar.unpacked');
      if (await fs.pathExists(x64Unpacked)) {
        await fs.move(x64Unpacked, path.resolve(tmpApp, 'Contents', 'Resources', 'x64.app.asar.unpacked'));
      }
      d('checking if the x64 and arm64 app folders are identical');
      const comparison = await dircompare.compare(
        path.resolve(tmpApp, 'Contents', 'Resources', 'app'),
        path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app'),
        { compareSize: true, compareContent: true },
      );

      await fs.copy(path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'), path.resolve(tmpApp, 'Contents', 'Resources', 'arm64.app.asar'));
      const arm64Unpacked = path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar.unpacked');
      if (await fs.pathExists(arm64Unpacked)) {
        await fs.copy(arm64Unpacked, path.resolve(tmpApp, 'Contents', 'Resources', 'arm64.app.asar.unpacked'));
      if (!comparison.same) {
        d('x64 and arm64 app folders are different, creating dynamic entry ASAR');
        await fs.move(
          path.resolve(tmpApp, 'Contents', 'Resources', 'app'),
          path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64'),
        );
        await fs.copy(
          path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app'),
          path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64'),
        );

        const entryAsar = path.resolve(tmpDir, 'entry-asar');
        await fs.mkdir(entryAsar);
        await fs.copy(
          path.resolve(__dirname, '..', '..', 'entry-asar', 'no-asar.js'),
          path.resolve(entryAsar, 'index.js'),
        );
        let pj = await fs.readJson(
          path.resolve(opts.x64AppPath, 'Contents', 'Resources', 'app', 'package.json'),
        );
        pj.main = 'index.js';
        await fs.writeJson(path.resolve(entryAsar, 'package.json'), pj);
        await asar.createPackage(
          entryAsar,
          path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'),
        );
      } else {
        d('x64 and arm64 app folders are the same');
      }
    }

    const entryAsar = path.resolve(tmpDir, 'entry-asar');
    await fs.mkdir(entryAsar);
    await fs.copy(path.resolve(__dirname, '..', '..', 'entry-asar', 'index.js'), path.resolve(entryAsar, 'index.js'));
    let pj: any;
    if (x64AsarMode === AsarMode.NO_ASAR) {
      pj = await fs.readJson(path.resolve(opts.x64AppPath, 'Contents', 'Resources', 'app', 'package.json'));
    } else {
      pj = JSON.parse((await asar.extractFile(path.resolve(opts.x64AppPath, 'Contents', 'Resources', 'app.asar'), 'package.json')).toString('utf8'));
    }
    pj.main = 'index.js';
    await fs.writeJson(path.resolve(entryAsar, 'package.json'), pj);
    await asar.createPackage(entryAsar, path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'));
    const generatedIntegrity: Record<string, { algorithm: 'SHA256'; hash: string }> = {};
    let didSplitAsar = false;

    for (const snapshotsFile of arm64Files.filter(f => f.type === AppFileType.SNAPSHOT)) {
      await fs.copy(path.resolve(opts.arm64AppPath, snapshotsFile.relativePath), path.resolve(tmpApp, snapshotsFile.relativePath));
    /**
     * If we have an ASAR we just need to check if the two "app.asar" files have the same hash,
     * if they are, same as above, we can leave one there and call it a day. If they're different
     * we have to make a dynamic entrypoint. There is an assumption made here that every file in
     * app.asar.unpacked is a native node module. This assumption _may_ not be true so we should
     * look at codifying that assumption as actual logic.
     */
    // FIXME: Codify the assumption that app.asar.unpacked only contains native modules
    if (x64AsarMode === AsarMode.HAS_ASAR && opts.mergeASARs) {
      d('merging x64 and arm64 asars');
      const output = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar');
      await mergeASARs({
        x64AsarPath: path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'),
        arm64AsarPath: path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'),
        outputAsarPath: output,
        singleArchFiles: opts.singleArchFiles,
      });

      generatedIntegrity['Resources/app.asar'] = generateAsarIntegrity(output);
    } else if (x64AsarMode === AsarMode.HAS_ASAR) {
      d('checking if the x64 and arm64 asars are identical');
      const x64AsarSha = await sha(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'));
      const arm64AsarSha = await sha(
        path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'),
      );

      if (x64AsarSha !== arm64AsarSha) {
        didSplitAsar = true;
        d('x64 and arm64 asars are different');
        const x64AsarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64.asar');
        await fs.move(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'), x64AsarPath);
        const x64Unpacked = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar.unpacked');
        if (await fs.pathExists(x64Unpacked)) {
          await fs.move(
            x64Unpacked,
            path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64.asar.unpacked'),
          );
        }

        const arm64AsarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64.asar');
        await fs.copy(
          path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'),
          arm64AsarPath,
        );
        const arm64Unpacked = path.resolve(
          opts.arm64AppPath,
          'Contents',
          'Resources',
          'app.asar.unpacked',
        );
        if (await fs.pathExists(arm64Unpacked)) {
          await fs.copy(
            arm64Unpacked,
            path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64.asar.unpacked'),
          );
        }

        const entryAsar = path.resolve(tmpDir, 'entry-asar');
        await fs.mkdir(entryAsar);
        await fs.copy(
          path.resolve(__dirname, '..', '..', 'entry-asar', 'has-asar.js'),
          path.resolve(entryAsar, 'index.js'),
        );
        let pj = JSON.parse(
          (
            await asar.extractFile(
              path.resolve(opts.x64AppPath, 'Contents', 'Resources', 'app.asar'),
              'package.json',
            )
          ).toString('utf8'),
        );
        pj.main = 'index.js';
        await fs.writeJson(path.resolve(entryAsar, 'package.json'), pj);
        const asarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar');
        await asar.createPackage(entryAsar, asarPath);

        generatedIntegrity['Resources/app.asar'] = generateAsarIntegrity(asarPath);
        generatedIntegrity['Resources/app-x64.asar'] = generateAsarIntegrity(x64AsarPath);
        generatedIntegrity['Resources/app-arm64.asar'] = generateAsarIntegrity(arm64AsarPath);
      } else {
        d('x64 and arm64 asars are the same');
        generatedIntegrity['Resources/app.asar'] = generateAsarIntegrity(
          path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'),
        );
      }
    }

    const plistFiles = x64Files.filter((f) => f.type === AppFileType.INFO_PLIST);
    for (const plistFile of plistFiles) {
      const x64PlistPath = path.resolve(opts.x64AppPath, plistFile.relativePath);
      const arm64PlistPath = path.resolve(opts.arm64AppPath, plistFile.relativePath);

      const { ElectronAsarIntegrity: x64Integrity, ...x64Plist } = plist.parse(
        await fs.readFile(x64PlistPath, 'utf8'),
      ) as any;
      const { ElectronAsarIntegrity: arm64Integrity, ...arm64Plist } = plist.parse(
        await fs.readFile(arm64PlistPath, 'utf8'),
      ) as any;
      if (JSON.stringify(x64Plist) !== JSON.stringify(arm64Plist)) {
        throw new Error(
          `Expected all Info.plist files to be identical when ignoring integrity when creating a universal build but "${plistFile.relativePath}" was not`,
        );
      }

      const injectAsarIntegrity =
        !opts.infoPlistsToIgnore ||
        minimatch(plistFile.relativePath, opts.infoPlistsToIgnore, { matchBase: true });
      const mergedPlist = injectAsarIntegrity
        ? { ...x64Plist, ElectronAsarIntegrity: generatedIntegrity }
        : { ...x64Plist };

      await fs.writeFile(path.resolve(tmpApp, plistFile.relativePath), plist.build(mergedPlist));
    }

    for (const snapshotsFile of arm64Files.filter((f) => f.type === AppFileType.SNAPSHOT)) {
      d('copying snapshot file', snapshotsFile.relativePath, 'to target application');
      await fs.copy(
        path.resolve(opts.arm64AppPath, snapshotsFile.relativePath),
        path.resolve(tmpApp, snapshotsFile.relativePath),
      );
    }

    d('moving final universal app to target destination');
    await fs.mkdirp(path.dirname(opts.outAppPath));
    await spawn('mv', [tmpApp, opts.outAppPath]);
  } catch (err) {
    throw err;
src/sha.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import * as fs from 'fs-extra';
import * as crypto from 'crypto';
import { d } from './debug';

export const sha = async (filePath: string) => {
  d('hashing', filePath);
  const hash = crypto.createHash('sha256');
  hash.setEncoding('hex');
  const fileStream = fs.createReadStream(filePath);
  fileStream.pipe(hash);
  await new Promise((resolve, reject) => {
    fileStream.on('end', () => resolve());
    fileStream.on('error', (err) => reject(err));
  });
  return hash.read();
};
tsconfig.cjs.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "extends": "./tsconfig.json",
  "include": ["src"]
}
tsconfig.entry-asar.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "outDir": "entry-asar",
  },
  "include": [
    "entry-asar"
  ],
  "exclude": []
}
@@ -3,5 +3,6 @@
  "compilerOptions": {
    "module": "esnext",
    "outDir": "dist/esm"
  }
}
  },
  "include": ["src"]
}
@@ -13,9 +13,11 @@
    ],
    "allowSyntheticDefaultImports": true,
    "moduleResolution": "node",
    "esModuleInterop": true,
    "declaration": true
  },
  "include": [
    "src"
    "src",
    "entry-asar"
  ]
}