init

15 mc_test/node_modules/dmg-builder/node_modules/fs-extra/LICENSE (generated, vendored, executable file)
@@ -0,0 +1,15 @@
(The MIT License)

Copyright (c) 2011-2017 JP Richardson

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

262 mc_test/node_modules/dmg-builder/node_modules/fs-extra/README.md (generated, vendored, executable file)
@@ -0,0 +1,262 @@
Node.js: fs-extra
=================

`fs-extra` adds file system methods that aren't included in the native `fs` module and adds promise support to the `fs` methods. It also uses [`graceful-fs`](https://github.com/isaacs/node-graceful-fs) to prevent `EMFILE` errors. It should be a drop-in replacement for `fs`.

[](https://www.npmjs.org/package/fs-extra)
[](https://github.com/jprichardson/node-fs-extra/blob/master/LICENSE)
[](https://github.com/jprichardson/node-fs-extra/actions/workflows/ci.yml?query=branch%3Amaster)
[](https://www.npmjs.org/package/fs-extra)
[](https://standardjs.com)

Why?
----

I got tired of including `mkdirp`, `rimraf`, and `ncp` in most of my projects.

Installation
------------

    npm install fs-extra

Usage
-----

`fs-extra` is a drop-in replacement for native `fs`. All methods in `fs` are attached to `fs-extra`. All `fs` methods return promises if the callback isn't passed.

You don't ever need to include the original `fs` module again:

```js
const fs = require('fs') // this is no longer necessary
```

you can now do this:

```js
const fs = require('fs-extra')
```

or if you prefer to make it clear that you're using `fs-extra` and not `fs`, you may want
to name your `fs` variable `fse` like so:

```js
const fse = require('fs-extra')
```

you can also keep both, but it's redundant:

```js
const fs = require('fs')
const fse = require('fs-extra')
```

Sync vs Async vs Async/Await
----------------------------

Most methods are async by default. All async methods will return a promise if the callback isn't passed.

Sync methods, on the other hand, will throw if an error occurs.

Also, async/await will throw an error if one occurs.

Example:

```js
const fs = require('fs-extra')

// Async with promises:
fs.copy('/tmp/myfile', '/tmp/mynewfile')
  .then(() => console.log('success!'))
  .catch(err => console.error(err))

// Async with callbacks:
fs.copy('/tmp/myfile', '/tmp/mynewfile', err => {
  if (err) return console.error(err)
  console.log('success!')
})

// Sync:
try {
  fs.copySync('/tmp/myfile', '/tmp/mynewfile')
  console.log('success!')
} catch (err) {
  console.error(err)
}

// Async/Await:
async function copyFiles () {
  try {
    await fs.copy('/tmp/myfile', '/tmp/mynewfile')
    console.log('success!')
  } catch (err) {
    console.error(err)
  }
}

copyFiles()
```

Methods
-------

### Async

- [copy](docs/copy.md)
- [emptyDir](docs/emptyDir.md)
- [ensureFile](docs/ensureFile.md)
- [ensureDir](docs/ensureDir.md)
- [ensureLink](docs/ensureLink.md)
- [ensureSymlink](docs/ensureSymlink.md)
- [mkdirp](docs/ensureDir.md)
- [mkdirs](docs/ensureDir.md)
- [move](docs/move.md)
- [outputFile](docs/outputFile.md)
- [outputJson](docs/outputJson.md)
- [pathExists](docs/pathExists.md)
- [readJson](docs/readJson.md)
- [remove](docs/remove.md)
- [writeJson](docs/writeJson.md)

### Sync

- [copySync](docs/copy-sync.md)
- [emptyDirSync](docs/emptyDir-sync.md)
- [ensureFileSync](docs/ensureFile-sync.md)
- [ensureDirSync](docs/ensureDir-sync.md)
- [ensureLinkSync](docs/ensureLink-sync.md)
- [ensureSymlinkSync](docs/ensureSymlink-sync.md)
- [mkdirpSync](docs/ensureDir-sync.md)
- [mkdirsSync](docs/ensureDir-sync.md)
- [moveSync](docs/move-sync.md)
- [outputFileSync](docs/outputFile-sync.md)
- [outputJsonSync](docs/outputJson-sync.md)
- [pathExistsSync](docs/pathExists-sync.md)
- [readJsonSync](docs/readJson-sync.md)
- [removeSync](docs/remove-sync.md)
- [writeJsonSync](docs/writeJson-sync.md)

**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()`, `fs.write()`, & `fs.writev()`](docs/fs-read-write-writev.md)
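
For illustration, a minimal sketch of what those promisified native forms look like in practice. The file paths here are placeholders; `fs.read()` resolves to an object because the native callback has multiple result arguments:

```js
const fs = require('fs-extra')

async function example () {
  // Plain fs methods return promises when no callback is passed:
  const data = await fs.readFile('/tmp/myfile', 'utf8')

  // fs.read() resolves to { bytesRead, buffer } instead of a single value:
  const fd = await fs.open('/tmp/myfile', 'r')
  const { bytesRead, buffer } = await fs.read(fd, Buffer.alloc(16), 0, 16, 0)
  await fs.close(fd)

  console.log(data.length, bytesRead, buffer)
}

example().catch(console.error)
```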

### What happened to `walk()` and `walkSync()`?

They were removed from `fs-extra` in v2.0.0. If you need the functionality, `walk` and `walkSync` are available as separate packages, [`klaw`](https://github.com/jprichardson/node-klaw) and [`klaw-sync`](https://github.com/manidlou/node-klaw-sync).
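
For reference, a walk with `klaw` looks roughly like this (assuming `klaw` is installed separately; the directory path is a placeholder):

```js
const klaw = require('klaw')

const items = [] // collect every path under the directory
klaw('/some/dir')
  .on('data', item => items.push(item.path))
  .on('error', err => console.error(err))
  .on('end', () => console.log(items))
```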

Third Party
-----------

### CLI

[fse-cli](https://www.npmjs.com/package/@atao60/fse-cli) allows you to run `fs-extra` from a console or from [npm](https://www.npmjs.com) scripts.

### TypeScript

If you like TypeScript, you can use `fs-extra` with it: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/fs-extra

### File / Directory Watching

If you want to watch for changes to files or directories, then you should use [chokidar](https://github.com/paulmillr/chokidar).

### Obtain Filesystem (Devices, Partitions) Information

[fs-filesystem](https://github.com/arthurintelligence/node-fs-filesystem) allows you to read the state of the filesystem of the host on which it is run. It returns information about both the devices and the partitions (volumes) of the system.

### Misc.

- [fs-extra-debug](https://github.com/jdxcode/fs-extra-debug) - Send your fs-extra calls to [debug](https://npmjs.org/package/debug).
- [mfs](https://github.com/cadorn/mfs) - Monitor your fs-extra calls.

Hacking on fs-extra
-------------------

Wanna hack on `fs-extra`? Great! Your help is needed! [fs-extra is one of the most depended-upon Node.js packages](http://nodei.co/npm/fs-extra.png?downloads=true&downloadRank=true&stars=true). This project
uses [JavaScript Standard Style](https://github.com/feross/standard) - if the name or style choices bother you,
you're gonna have to get over it :) If `standard` is good enough for `npm`, it's good enough for `fs-extra`.

[](https://github.com/feross/standard)

What's needed?
- First, take a look at existing issues. Those are probably going to be where the priority lies.
- More tests for edge cases, specifically on different platforms. There can never be enough tests.
- Improve test coverage.

Note: If you make any big changes, **you should definitely file an issue for discussion first.**

### Running the Test Suite

fs-extra contains hundreds of tests.

- `npm run lint`: runs the linter ([standard](http://standardjs.com/))
- `npm run unit`: runs the unit tests
- `npm test`: runs both the linter and the tests

### Windows

If you run the tests on Windows and receive a lot of symbolic link `EPERM` permission errors, it's
because on Windows you need elevated privileges to create symbolic links. You can add this to your Windows
account by following the instructions here: http://superuser.com/questions/104845/permission-to-make-symbolic-links-in-windows-7
However, I didn't have much luck doing this.

Since I develop on Mac OS X, I use VMWare Fusion for Windows testing. I create a shared folder that I map to a drive on Windows.
I open the `Node.js command prompt` and run as `Administrator`. I then map the network drive by running the following command:

    net use z: "\\vmware-host\Shared Folders"

I can then navigate to my `fs-extra` directory and run the tests.

Naming
------

I put a lot of thought into the naming of these functions. The effort was inspired by @coolaj86's request, so he deserves much of the credit for raising the issue. See discussion(s) here:

* https://github.com/jprichardson/node-fs-extra/issues/2
* https://github.com/flatiron/utile/issues/11
* https://github.com/ryanmcgrath/wrench-js/issues/29
* https://github.com/substack/node-mkdirp/issues/17

First, I believe that in as many cases as possible, the [Node.js naming schemes](http://nodejs.org/api/fs.html) should be chosen. However, there are problems with Node.js's own naming schemes.

For example, `fs.readFile()` and `fs.readdir()`: the **F** is capitalized in *File* and the **d** is not capitalized in *dir*. Perhaps a bit pedantic, but they should still be consistent. Also, Node.js has chosen a lot of POSIX naming schemes, which I believe is great. See: `fs.mkdir()`, `fs.rmdir()`, `fs.chown()`, etc.

We have a dilemma though. How do you consistently name methods that perform the following POSIX commands: `cp`, `cp -r`, `mkdir -p`, and `rm -rf`?

My perspective: when in doubt, err on the side of simplicity. A directory is just a hierarchical grouping of directories and files. Consider that for a moment. So when you want to copy it or remove it, in most cases you'll want to copy or remove all of its contents. When you want to create a directory, if the directory that it's supposed to be contained in does not exist, then in most cases you'll want to create that too.

So, if you want to remove a file or a directory regardless of whether it has contents, just call `fs.remove(path)`. If you want to copy a file or a directory regardless of whether it has contents, just call `fs.copy(source, destination)`. If you want to create a directory regardless of whether its parent directories exist, just call `fs.mkdirs(path)` or `fs.mkdirp(path)`. A sketch of these semantics follows.
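
A minimal sketch of those semantics, with hypothetical paths:

```js
const fs = require('fs-extra')

async function demo () {
  // Creates /tmp/a/b/c even though /tmp/a does not exist yet:
  await fs.mkdirs('/tmp/a/b/c')

  // Copies the whole tree, creating missing parent directories at the destination:
  await fs.copy('/tmp/a', '/tmp/backup/a')

  // Removes the directory and everything inside it (like rm -rf):
  await fs.remove('/tmp/backup')
}

demo().catch(console.error)
```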

Credit
------

`fs-extra` wouldn't be possible without using the modules from the following authors:

- [Isaac Schlueter](https://github.com/isaacs)
- [Charlie McConnell](https://github.com/avianflu)
- [James Halliday](https://github.com/substack)
- [Andrew Kelley](https://github.com/andrewrk)

License
-------

Licensed under MIT

Copyright (c) 2011-2017 [JP Richardson](https://github.com/jprichardson)

[1]: http://nodejs.org/docs/latest/api/fs.html

[jsonfile]: https://github.com/jprichardson/node-jsonfile

169 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/copy/copy-sync.js (generated, vendored, executable file)
@@ -0,0 +1,169 @@
'use strict'

const fs = require('graceful-fs')
const path = require('path')
const mkdirsSync = require('../mkdirs').mkdirsSync
const utimesMillisSync = require('../util/utimes').utimesMillisSync
const stat = require('../util/stat')

function copySync (src, dest, opts) {
  if (typeof opts === 'function') {
    opts = { filter: opts }
  }

  opts = opts || {}
  opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
  opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber

  // Warn about using preserveTimestamps on 32-bit node
  if (opts.preserveTimestamps && process.arch === 'ia32') {
    process.emitWarning(
      'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' +
      '\tsee https://github.com/jprichardson/node-fs-extra/issues/269',
      'Warning', 'fs-extra-WARN0002'
    )
  }

  const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts)
  stat.checkParentPathsSync(src, srcStat, dest, 'copy')
  return handleFilterAndCopy(destStat, src, dest, opts)
}

function handleFilterAndCopy (destStat, src, dest, opts) {
  if (opts.filter && !opts.filter(src, dest)) return
  const destParent = path.dirname(dest)
  if (!fs.existsSync(destParent)) mkdirsSync(destParent)
  return getStats(destStat, src, dest, opts)
}

function startCopy (destStat, src, dest, opts) {
  if (opts.filter && !opts.filter(src, dest)) return
  return getStats(destStat, src, dest, opts)
}

function getStats (destStat, src, dest, opts) {
  const statSync = opts.dereference ? fs.statSync : fs.lstatSync
  const srcStat = statSync(src)

  if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)
  else if (srcStat.isFile() ||
           srcStat.isCharacterDevice() ||
           srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)
  else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)
  else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)
  else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)
  throw new Error(`Unknown file: ${src}`)
}

function onFile (srcStat, destStat, src, dest, opts) {
  if (!destStat) return copyFile(srcStat, src, dest, opts)
  return mayCopyFile(srcStat, src, dest, opts)
}

function mayCopyFile (srcStat, src, dest, opts) {
  if (opts.overwrite) {
    fs.unlinkSync(dest)
    return copyFile(srcStat, src, dest, opts)
  } else if (opts.errorOnExist) {
    throw new Error(`'${dest}' already exists`)
  }
}

function copyFile (srcStat, src, dest, opts) {
  fs.copyFileSync(src, dest)
  if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)
  return setDestMode(dest, srcStat.mode)
}

function handleTimestamps (srcMode, src, dest) {
  // Make sure the file is writable before setting the timestamp
  // otherwise open fails with EPERM when invoked with 'r+'
  // (through utimes call)
  if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)
  return setDestTimestamps(src, dest)
}

function fileIsNotWritable (srcMode) {
  return (srcMode & 0o200) === 0
}

function makeFileWritable (dest, srcMode) {
  return setDestMode(dest, srcMode | 0o200)
}

function setDestMode (dest, srcMode) {
  return fs.chmodSync(dest, srcMode)
}

function setDestTimestamps (src, dest) {
  // The initial srcStat.atime cannot be trusted
  // because it is modified by the read(2) system call
  // (See https://nodejs.org/api/fs.html#fs_stat_time_values)
  const updatedSrcStat = fs.statSync(src)
  return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
}

function onDir (srcStat, destStat, src, dest, opts) {
  if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)
  return copyDir(src, dest, opts)
}

function mkDirAndCopy (srcMode, src, dest, opts) {
  fs.mkdirSync(dest)
  copyDir(src, dest, opts)
  return setDestMode(dest, srcMode)
}

function copyDir (src, dest, opts) {
  fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts))
}

function copyDirItem (item, src, dest, opts) {
  const srcItem = path.join(src, item)
  const destItem = path.join(dest, item)
  const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts)
  return startCopy(destStat, srcItem, destItem, opts)
}

function onLink (destStat, src, dest, opts) {
  let resolvedSrc = fs.readlinkSync(src)
  if (opts.dereference) {
    resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
  }

  if (!destStat) {
    return fs.symlinkSync(resolvedSrc, dest)
  } else {
    let resolvedDest
    try {
      resolvedDest = fs.readlinkSync(dest)
    } catch (err) {
      // dest exists and is a regular file or directory,
      // Windows may throw UNKNOWN error. If dest already exists,
      // fs throws error anyway, so no need to guard against it here.
      if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)
      throw err
    }
    if (opts.dereference) {
      resolvedDest = path.resolve(process.cwd(), resolvedDest)
    }
    if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
      throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)
    }

    // prevent copy if src is a subdir of dest since unlinking
    // dest in this case would result in removing src contents
    // and therefore a broken symlink would be created.
    if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
      throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)
    }
    return copyLink(resolvedSrc, dest)
  }
}

function copyLink (resolvedSrc, dest) {
  fs.unlinkSync(dest)
  return fs.symlinkSync(resolvedSrc, dest)
}

module.exports = copySync
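
The options threaded through this file (`overwrite`/`clobber`, `errorOnExist`, `preserveTimestamps`, `dereference`, `filter`) are the public `copy`/`copySync` options. A minimal usage sketch, with hypothetical paths:

```js
const fs = require('fs-extra')

// Copy a tree, skipping node_modules, without clobbering existing files:
fs.copySync('/tmp/src', '/tmp/dest', {
  overwrite: false,
  errorOnExist: true, // throw instead of silently skipping when dest exists
  preserveTimestamps: true, // carry over mtime/atime of copied files
  filter: src => !src.includes('node_modules') // return false to skip an item
})
```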

235 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/copy/copy.js (generated, vendored, executable file)
@@ -0,0 +1,235 @@
'use strict'

const fs = require('graceful-fs')
const path = require('path')
const mkdirs = require('../mkdirs').mkdirs
const pathExists = require('../path-exists').pathExists
const utimesMillis = require('../util/utimes').utimesMillis
const stat = require('../util/stat')

function copy (src, dest, opts, cb) {
  if (typeof opts === 'function' && !cb) {
    cb = opts
    opts = {}
  } else if (typeof opts === 'function') {
    opts = { filter: opts }
  }

  cb = cb || function () {}
  opts = opts || {}

  opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
  opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber

  // Warn about using preserveTimestamps on 32-bit node
  if (opts.preserveTimestamps && process.arch === 'ia32') {
    process.emitWarning(
      'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' +
      '\tsee https://github.com/jprichardson/node-fs-extra/issues/269',
      'Warning', 'fs-extra-WARN0001'
    )
  }

  stat.checkPaths(src, dest, 'copy', opts, (err, stats) => {
    if (err) return cb(err)
    const { srcStat, destStat } = stats
    stat.checkParentPaths(src, srcStat, dest, 'copy', err => {
      if (err) return cb(err)
      if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb)
      return checkParentDir(destStat, src, dest, opts, cb)
    })
  })
}

function checkParentDir (destStat, src, dest, opts, cb) {
  const destParent = path.dirname(dest)
  pathExists(destParent, (err, dirExists) => {
    if (err) return cb(err)
    if (dirExists) return getStats(destStat, src, dest, opts, cb)
    mkdirs(destParent, err => {
      if (err) return cb(err)
      return getStats(destStat, src, dest, opts, cb)
    })
  })
}

function handleFilter (onInclude, destStat, src, dest, opts, cb) {
  Promise.resolve(opts.filter(src, dest)).then(include => {
    if (include) return onInclude(destStat, src, dest, opts, cb)
    return cb()
  }, error => cb(error))
}

function startCopy (destStat, src, dest, opts, cb) {
  if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb)
  return getStats(destStat, src, dest, opts, cb)
}

function getStats (destStat, src, dest, opts, cb) {
  const stat = opts.dereference ? fs.stat : fs.lstat
  stat(src, (err, srcStat) => {
    if (err) return cb(err)

    if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb)
    else if (srcStat.isFile() ||
             srcStat.isCharacterDevice() ||
             srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb)
    else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb)
    else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`))
    else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`))
    return cb(new Error(`Unknown file: ${src}`))
  })
}

function onFile (srcStat, destStat, src, dest, opts, cb) {
  if (!destStat) return copyFile(srcStat, src, dest, opts, cb)
  return mayCopyFile(srcStat, src, dest, opts, cb)
}

function mayCopyFile (srcStat, src, dest, opts, cb) {
  if (opts.overwrite) {
    fs.unlink(dest, err => {
      if (err) return cb(err)
      return copyFile(srcStat, src, dest, opts, cb)
    })
  } else if (opts.errorOnExist) {
    return cb(new Error(`'${dest}' already exists`))
  } else return cb()
}

function copyFile (srcStat, src, dest, opts, cb) {
  fs.copyFile(src, dest, err => {
    if (err) return cb(err)
    if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb)
    return setDestMode(dest, srcStat.mode, cb)
  })
}

function handleTimestampsAndMode (srcMode, src, dest, cb) {
  // Make sure the file is writable before setting the timestamp
  // otherwise open fails with EPERM when invoked with 'r+'
  // (through utimes call)
  if (fileIsNotWritable(srcMode)) {
    return makeFileWritable(dest, srcMode, err => {
      if (err) return cb(err)
      return setDestTimestampsAndMode(srcMode, src, dest, cb)
    })
  }
  return setDestTimestampsAndMode(srcMode, src, dest, cb)
}

function fileIsNotWritable (srcMode) {
  return (srcMode & 0o200) === 0
}

function makeFileWritable (dest, srcMode, cb) {
  return setDestMode(dest, srcMode | 0o200, cb)
}

function setDestTimestampsAndMode (srcMode, src, dest, cb) {
  setDestTimestamps(src, dest, err => {
    if (err) return cb(err)
    return setDestMode(dest, srcMode, cb)
  })
}

function setDestMode (dest, srcMode, cb) {
  return fs.chmod(dest, srcMode, cb)
}

function setDestTimestamps (src, dest, cb) {
  // The initial srcStat.atime cannot be trusted
  // because it is modified by the read(2) system call
  // (See https://nodejs.org/api/fs.html#fs_stat_time_values)
  fs.stat(src, (err, updatedSrcStat) => {
    if (err) return cb(err)
    return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb)
  })
}

function onDir (srcStat, destStat, src, dest, opts, cb) {
  if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb)
  return copyDir(src, dest, opts, cb)
}

function mkDirAndCopy (srcMode, src, dest, opts, cb) {
  fs.mkdir(dest, err => {
    if (err) return cb(err)
    copyDir(src, dest, opts, err => {
      if (err) return cb(err)
      return setDestMode(dest, srcMode, cb)
    })
  })
}

function copyDir (src, dest, opts, cb) {
  fs.readdir(src, (err, items) => {
    if (err) return cb(err)
    return copyDirItems(items, src, dest, opts, cb)
  })
}

function copyDirItems (items, src, dest, opts, cb) {
  const item = items.pop()
  if (!item) return cb()
  return copyDirItem(items, item, src, dest, opts, cb)
}

function copyDirItem (items, item, src, dest, opts, cb) {
  const srcItem = path.join(src, item)
  const destItem = path.join(dest, item)
  stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => {
    if (err) return cb(err)
    const { destStat } = stats
    startCopy(destStat, srcItem, destItem, opts, err => {
      if (err) return cb(err)
      return copyDirItems(items, src, dest, opts, cb)
    })
  })
}

function onLink (destStat, src, dest, opts, cb) {
  fs.readlink(src, (err, resolvedSrc) => {
    if (err) return cb(err)
    if (opts.dereference) {
      resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
    }

    if (!destStat) {
      return fs.symlink(resolvedSrc, dest, cb)
    } else {
      fs.readlink(dest, (err, resolvedDest) => {
        if (err) {
          // dest exists and is a regular file or directory,
          // Windows may throw UNKNOWN error. If dest already exists,
          // fs throws error anyway, so no need to guard against it here.
          if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb)
          return cb(err)
        }
        if (opts.dereference) {
          resolvedDest = path.resolve(process.cwd(), resolvedDest)
        }
        if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
          return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`))
        }

        // do not copy if src is a subdir of dest since unlinking
        // dest in this case would result in removing src contents
        // and therefore a broken symlink would be created.
        if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
          return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`))
        }
        return copyLink(resolvedSrc, dest, cb)
      })
    }
  })
}

function copyLink (resolvedSrc, dest, cb) {
  fs.unlink(dest, err => {
    if (err) return cb(err)
    return fs.symlink(resolvedSrc, dest, cb)
  })
}

module.exports = copy

7 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/copy/index.js (generated, vendored, executable file)
@@ -0,0 +1,7 @@
'use strict'

const u = require('universalify').fromCallback
module.exports = {
  copy: u(require('./copy')),
  copySync: require('./copy-sync')
}
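
`universalify.fromCallback` is what makes the callback-style `copy` above usable in either style. A minimal sketch of the resulting dual API (paths are hypothetical):

```js
const fs = require('fs-extra')

// Callback style:
fs.copy('/tmp/a', '/tmp/b', err => {
  if (err) return console.error(err)
  console.log('copied (callback style)')
})

// Promise style — the same function, just without a callback:
fs.copy('/tmp/a', '/tmp/b')
  .then(() => console.log('copied (promise style)'))
  .catch(console.error)
```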

39 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/empty/index.js (generated, vendored, executable file)
@@ -0,0 +1,39 @@
'use strict'

const u = require('universalify').fromPromise
const fs = require('../fs')
const path = require('path')
const mkdir = require('../mkdirs')
const remove = require('../remove')

const emptyDir = u(async function emptyDir (dir) {
  let items
  try {
    items = await fs.readdir(dir)
  } catch {
    return mkdir.mkdirs(dir)
  }

  return Promise.all(items.map(item => remove.remove(path.join(dir, item))))
})

function emptyDirSync (dir) {
  let items
  try {
    items = fs.readdirSync(dir)
  } catch {
    return mkdir.mkdirsSync(dir)
  }

  items.forEach(item => {
    item = path.join(dir, item)
    remove.removeSync(item)
  })
}

module.exports = {
  emptyDirSync,
  emptydirSync: emptyDirSync,
  emptyDir,
  emptydir: emptyDir
}
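
Note the semantics above: `emptyDir` deletes the directory's contents if it exists, and creates the directory if it doesn't. A minimal sketch with a hypothetical path:

```js
const fs = require('fs-extra')

async function resetWorkDir () {
  // After this call, /tmp/work exists and is empty — whether or not
  // it existed (or had contents) beforehand:
  await fs.emptyDir('/tmp/work')
}

resetWorkDir().catch(console.error)
```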

69 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/ensure/file.js (generated, vendored, executable file)
@@ -0,0 +1,69 @@
'use strict'

const u = require('universalify').fromCallback
const path = require('path')
const fs = require('graceful-fs')
const mkdir = require('../mkdirs')

function createFile (file, callback) {
  function makeFile () {
    fs.writeFile(file, '', err => {
      if (err) return callback(err)
      callback()
    })
  }

  fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err
    if (!err && stats.isFile()) return callback()
    const dir = path.dirname(file)
    fs.stat(dir, (err, stats) => {
      if (err) {
        // if the directory doesn't exist, make it
        if (err.code === 'ENOENT') {
          return mkdir.mkdirs(dir, err => {
            if (err) return callback(err)
            makeFile()
          })
        }
        return callback(err)
      }

      if (stats.isDirectory()) makeFile()
      else {
        // parent is not a directory
        // This is just to cause an internal ENOTDIR error to be thrown
        fs.readdir(dir, err => {
          if (err) return callback(err)
        })
      }
    })
  })
}

function createFileSync (file) {
  let stats
  try {
    stats = fs.statSync(file)
  } catch {}
  if (stats && stats.isFile()) return

  const dir = path.dirname(file)
  try {
    if (!fs.statSync(dir).isDirectory()) {
      // parent is not a directory
      // This is just to cause an internal ENOTDIR error to be thrown
      fs.readdirSync(dir)
    }
  } catch (err) {
    // If the stat call above failed because the directory doesn't exist, create it
    if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)
    else throw err
  }

  fs.writeFileSync(file, '')
}

module.exports = {
  createFile: u(createFile),
  createFileSync
}

23 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/ensure/index.js (generated, vendored, executable file)
@@ -0,0 +1,23 @@
'use strict'

const { createFile, createFileSync } = require('./file')
const { createLink, createLinkSync } = require('./link')
const { createSymlink, createSymlinkSync } = require('./symlink')

module.exports = {
  // file
  createFile,
  createFileSync,
  ensureFile: createFile,
  ensureFileSync: createFileSync,
  // link
  createLink,
  createLinkSync,
  ensureLink: createLink,
  ensureLinkSync: createLinkSync,
  // symlink
  createSymlink,
  createSymlinkSync,
  ensureSymlink: createSymlink,
  ensureSymlinkSync: createSymlinkSync
}

64 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/ensure/link.js (generated, vendored, executable file)
@@ -0,0 +1,64 @@
'use strict'

const u = require('universalify').fromCallback
const path = require('path')
const fs = require('graceful-fs')
const mkdir = require('../mkdirs')
const pathExists = require('../path-exists').pathExists
const { areIdentical } = require('../util/stat')

function createLink (srcpath, dstpath, callback) {
  function makeLink (srcpath, dstpath) {
    fs.link(srcpath, dstpath, err => {
      if (err) return callback(err)
      callback(null)
    })
  }

  fs.lstat(dstpath, (_, dstStat) => {
    fs.lstat(srcpath, (err, srcStat) => {
      if (err) {
        err.message = err.message.replace('lstat', 'ensureLink')
        return callback(err)
      }
      if (dstStat && areIdentical(srcStat, dstStat)) return callback(null)

      const dir = path.dirname(dstpath)
      pathExists(dir, (err, dirExists) => {
        if (err) return callback(err)
        if (dirExists) return makeLink(srcpath, dstpath)
        mkdir.mkdirs(dir, err => {
          if (err) return callback(err)
          makeLink(srcpath, dstpath)
        })
      })
    })
  })
}

function createLinkSync (srcpath, dstpath) {
  let dstStat
  try {
    dstStat = fs.lstatSync(dstpath)
  } catch {}

  try {
    const srcStat = fs.lstatSync(srcpath)
    if (dstStat && areIdentical(srcStat, dstStat)) return
  } catch (err) {
    err.message = err.message.replace('lstat', 'ensureLink')
    throw err
  }

  const dir = path.dirname(dstpath)
  const dirExists = fs.existsSync(dir)
  if (dirExists) return fs.linkSync(srcpath, dstpath)
  mkdir.mkdirsSync(dir)

  return fs.linkSync(srcpath, dstpath)
}

module.exports = {
  createLink: u(createLink),
  createLinkSync
}

99 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/ensure/symlink-paths.js (generated, vendored, executable file)
@@ -0,0 +1,99 @@
'use strict'

const path = require('path')
const fs = require('graceful-fs')
const pathExists = require('../path-exists').pathExists

/**
 * Function that returns two types of paths, one relative to symlink, and one
 * relative to the current working directory. Checks if path is absolute or
 * relative. If the path is relative, this function checks if the path is
 * relative to symlink or relative to current working directory. This is an
 * initiative to find a smarter `srcpath` to supply when building symlinks.
 * This allows you to determine which path to use out of one of three possible
 * types of source paths. The first is an absolute path. This is detected by
 * `path.isAbsolute()`. When an absolute path is provided, it is checked to
 * see if it exists. If it does, it's used; if not, an error is returned
 * (callback) / thrown (sync). The other two options for `srcpath` are a
 * relative path. By default Node's `fs.symlink` works by creating a symlink
 * using `dstpath` and expects the `srcpath` to be relative to the newly
 * created symlink. If you provide a `srcpath` that does not exist on the file
 * system it results in a broken symlink. To minimize this, the function
 * checks to see if the 'relative to symlink' source file exists, and if it
 * does, it will use it. If it does not, it checks if there's a file that
 * exists relative to the current working directory; if there is, it's used.
 * This preserves the expectations of the original fs.symlink spec and adds
 * the ability to pass in 'relative to current working directory' paths.
 */

function symlinkPaths (srcpath, dstpath, callback) {
  if (path.isAbsolute(srcpath)) {
    return fs.lstat(srcpath, (err) => {
      if (err) {
        err.message = err.message.replace('lstat', 'ensureSymlink')
        return callback(err)
      }
      return callback(null, {
        toCwd: srcpath,
        toDst: srcpath
      })
    })
  } else {
    const dstdir = path.dirname(dstpath)
    const relativeToDst = path.join(dstdir, srcpath)
    return pathExists(relativeToDst, (err, exists) => {
      if (err) return callback(err)
      if (exists) {
        return callback(null, {
          toCwd: relativeToDst,
          toDst: srcpath
        })
      } else {
        return fs.lstat(srcpath, (err) => {
          if (err) {
            err.message = err.message.replace('lstat', 'ensureSymlink')
            return callback(err)
          }
          return callback(null, {
            toCwd: srcpath,
            toDst: path.relative(dstdir, srcpath)
          })
        })
      }
    })
  }
}

function symlinkPathsSync (srcpath, dstpath) {
  let exists
  if (path.isAbsolute(srcpath)) {
    exists = fs.existsSync(srcpath)
    if (!exists) throw new Error('absolute srcpath does not exist')
    return {
      toCwd: srcpath,
      toDst: srcpath
    }
  } else {
    const dstdir = path.dirname(dstpath)
    const relativeToDst = path.join(dstdir, srcpath)
    exists = fs.existsSync(relativeToDst)
    if (exists) {
      return {
        toCwd: relativeToDst,
        toDst: srcpath
      }
    } else {
      exists = fs.existsSync(srcpath)
      if (!exists) throw new Error('relative srcpath does not exist')
      return {
        toCwd: srcpath,
        toDst: path.relative(dstdir, srcpath)
      }
    }
  }
}

module.exports = {
  symlinkPaths,
  symlinkPathsSync
}
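
To make the docblock's cases concrete, a small sketch of what the internal `symlinkPathsSync` resolves, under a hypothetical directory layout where `./assets/logo.png` exists and `./public/assets/logo.png` does not:

```js
const { symlinkPathsSync } = require('./symlink-paths')

// The srcpath is relative to the cwd, not to the future symlink, so the
// stored link target is rewritten to be relative to the symlink's directory:
const paths = symlinkPathsSync('assets/logo.png', 'public/logo.png')
// paths.toCwd === 'assets/logo.png'        — used to verify the source exists
// paths.toDst === '../assets/logo.png'     — what actually gets stored in the link
console.log(paths)
```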

31 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/ensure/symlink-type.js (generated, vendored, executable file)
@@ -0,0 +1,31 @@
'use strict'

const fs = require('graceful-fs')

function symlinkType (srcpath, type, callback) {
  callback = (typeof type === 'function') ? type : callback
  type = (typeof type === 'function') ? false : type
  if (type) return callback(null, type)
  fs.lstat(srcpath, (err, stats) => {
    if (err) return callback(null, 'file')
    type = (stats && stats.isDirectory()) ? 'dir' : 'file'
    callback(null, type)
  })
}

function symlinkTypeSync (srcpath, type) {
  let stats

  if (type) return type
  try {
    stats = fs.lstatSync(srcpath)
  } catch {
    return 'file'
  }
  return (stats && stats.isDirectory()) ? 'dir' : 'file'
}

module.exports = {
  symlinkType,
  symlinkTypeSync
}

82 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/ensure/symlink.js (generated, vendored, executable file)
@@ -0,0 +1,82 @@
'use strict'

const u = require('universalify').fromCallback
const path = require('path')
const fs = require('../fs')
const _mkdirs = require('../mkdirs')
const mkdirs = _mkdirs.mkdirs
const mkdirsSync = _mkdirs.mkdirsSync

const _symlinkPaths = require('./symlink-paths')
const symlinkPaths = _symlinkPaths.symlinkPaths
const symlinkPathsSync = _symlinkPaths.symlinkPathsSync

const _symlinkType = require('./symlink-type')
const symlinkType = _symlinkType.symlinkType
const symlinkTypeSync = _symlinkType.symlinkTypeSync

const pathExists = require('../path-exists').pathExists

const { areIdentical } = require('../util/stat')

function createSymlink (srcpath, dstpath, type, callback) {
  callback = (typeof type === 'function') ? type : callback
  type = (typeof type === 'function') ? false : type

  fs.lstat(dstpath, (err, stats) => {
    if (!err && stats.isSymbolicLink()) {
      Promise.all([
        fs.stat(srcpath),
        fs.stat(dstpath)
      ]).then(([srcStat, dstStat]) => {
        if (areIdentical(srcStat, dstStat)) return callback(null)
        _createSymlink(srcpath, dstpath, type, callback)
      })
    } else _createSymlink(srcpath, dstpath, type, callback)
  })
}

function _createSymlink (srcpath, dstpath, type, callback) {
  symlinkPaths(srcpath, dstpath, (err, relative) => {
    if (err) return callback(err)
    srcpath = relative.toDst
    symlinkType(relative.toCwd, type, (err, type) => {
      if (err) return callback(err)
      const dir = path.dirname(dstpath)
      pathExists(dir, (err, dirExists) => {
        if (err) return callback(err)
        if (dirExists) return fs.symlink(srcpath, dstpath, type, callback)
        mkdirs(dir, err => {
          if (err) return callback(err)
          fs.symlink(srcpath, dstpath, type, callback)
        })
      })
    })
  })
}

function createSymlinkSync (srcpath, dstpath, type) {
  let stats
  try {
    stats = fs.lstatSync(dstpath)
  } catch {}
  if (stats && stats.isSymbolicLink()) {
    const srcStat = fs.statSync(srcpath)
    const dstStat = fs.statSync(dstpath)
    if (areIdentical(srcStat, dstStat)) return
  }

  const relative = symlinkPathsSync(srcpath, dstpath)
  srcpath = relative.toDst
  type = symlinkTypeSync(relative.toCwd, type)
  const dir = path.dirname(dstpath)
  const exists = fs.existsSync(dir)
  if (exists) return fs.symlinkSync(srcpath, dstpath, type)
  mkdirsSync(dir)
  return fs.symlinkSync(srcpath, dstpath, type)
}

module.exports = {
  createSymlink: u(createSymlink),
  createSymlinkSync
}

128 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/fs/index.js (generated, vendored, executable file)
@@ -0,0 +1,128 @@
'use strict'
// This is adapted from https://github.com/normalize/mz
// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors
const u = require('universalify').fromCallback
const fs = require('graceful-fs')

const api = [
  'access',
  'appendFile',
  'chmod',
  'chown',
  'close',
  'copyFile',
  'fchmod',
  'fchown',
  'fdatasync',
  'fstat',
  'fsync',
  'ftruncate',
  'futimes',
  'lchmod',
  'lchown',
  'link',
  'lstat',
  'mkdir',
  'mkdtemp',
  'open',
  'opendir',
  'readdir',
  'readFile',
  'readlink',
  'realpath',
  'rename',
  'rm',
  'rmdir',
  'stat',
  'symlink',
  'truncate',
  'unlink',
  'utimes',
  'writeFile'
].filter(key => {
  // Some commands are not available on some systems. Ex:
  // fs.opendir was added in Node.js v12.12.0
  // fs.rm was added in Node.js v14.14.0
  // fs.lchown is not available on at least some Linux
  return typeof fs[key] === 'function'
})

// Export cloned fs:
Object.assign(exports, fs)

// Universalify async methods:
api.forEach(method => {
  exports[method] = u(fs[method])
})

// We differ from mz/fs in that we still ship the old, broken, fs.exists()
// since we are a drop-in replacement for the native module
exports.exists = function (filename, callback) {
  if (typeof callback === 'function') {
    return fs.exists(filename, callback)
  }
  return new Promise(resolve => {
    return fs.exists(filename, resolve)
  })
}

// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args

exports.read = function (fd, buffer, offset, length, position, callback) {
  if (typeof callback === 'function') {
    return fs.read(fd, buffer, offset, length, position, callback)
  }
  return new Promise((resolve, reject) => {
    fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {
      if (err) return reject(err)
      resolve({ bytesRead, buffer })
    })
  })
}

// Function signature can be
// fs.write(fd, buffer[, offset[, length[, position]]], callback)
// OR
// fs.write(fd, string[, position[, encoding]], callback)
// We need to handle both cases, so we use ...args
exports.write = function (fd, buffer, ...args) {
  if (typeof args[args.length - 1] === 'function') {
    return fs.write(fd, buffer, ...args)
  }

  return new Promise((resolve, reject) => {
    fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {
      if (err) return reject(err)
      resolve({ bytesWritten, buffer })
    })
  })
}

// fs.writev only available in Node v12.9.0+
if (typeof fs.writev === 'function') {
  // Function signature is
  // fs.writev(fd, buffers[, position], callback)
  // We need to handle the optional arg, so we use ...args
  exports.writev = function (fd, buffers, ...args) {
    if (typeof args[args.length - 1] === 'function') {
      return fs.writev(fd, buffers, ...args)
    }

    return new Promise((resolve, reject) => {
      fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {
        if (err) return reject(err)
        resolve({ bytesWritten, buffers })
      })
    })
  }
}

// fs.realpath.native sometimes not available if fs is monkey-patched
if (typeof fs.realpath.native === 'function') {
  exports.realpath.native = u(fs.realpath.native)
} else {
  process.emitWarning(
    'fs.realpath.native is not a function. Is fs being monkey-patched?',
    'Warning', 'fs-extra-WARN0003'
  )
}

16 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/index.js (generated, vendored, executable file)
@@ -0,0 +1,16 @@
'use strict'

module.exports = {
  // Export promisified graceful-fs:
  ...require('./fs'),
  // Export extra methods:
  ...require('./copy'),
  ...require('./empty'),
  ...require('./ensure'),
  ...require('./json'),
  ...require('./mkdirs'),
  ...require('./move'),
  ...require('./output-file'),
  ...require('./path-exists'),
  ...require('./remove')
}

16 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/json/index.js (generated, vendored, executable file)
@@ -0,0 +1,16 @@
'use strict'

const u = require('universalify').fromPromise
const jsonFile = require('./jsonfile')

jsonFile.outputJson = u(require('./output-json'))
jsonFile.outputJsonSync = require('./output-json-sync')
// aliases
jsonFile.outputJSON = jsonFile.outputJson
jsonFile.outputJSONSync = jsonFile.outputJsonSync
jsonFile.writeJSON = jsonFile.writeJson
jsonFile.writeJSONSync = jsonFile.writeJsonSync
jsonFile.readJSON = jsonFile.readJson
jsonFile.readJSONSync = jsonFile.readJsonSync

module.exports = jsonFile

11 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/json/jsonfile.js (generated, vendored, executable file)
@@ -0,0 +1,11 @@
'use strict'

const jsonFile = require('jsonfile')

module.exports = {
  // jsonfile exports
  readJson: jsonFile.readFile,
  readJsonSync: jsonFile.readFileSync,
  writeJson: jsonFile.writeFile,
  writeJsonSync: jsonFile.writeFileSync
}

12 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/json/output-json-sync.js (generated, vendored, executable file)
@@ -0,0 +1,12 @@
'use strict'

const { stringify } = require('jsonfile/utils')
const { outputFileSync } = require('../output-file')

function outputJsonSync (file, data, options) {
  const str = stringify(data, options)

  outputFileSync(file, str, options)
}

module.exports = outputJsonSync

12 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/json/output-json.js (generated, vendored, executable file)
@@ -0,0 +1,12 @@
'use strict'

const { stringify } = require('jsonfile/utils')
const { outputFile } = require('../output-file')

async function outputJson (file, data, options = {}) {
  const str = stringify(data, options)

  await outputFile(file, str, options)
}

module.exports = outputJson
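
`outputJson` combines JSON serialization with `outputFile`, which creates missing parent directories. A minimal usage sketch (path and data are hypothetical; `spaces` is a jsonfile option passed through via `options`):

```js
const fs = require('fs-extra')

async function saveConfig () {
  // The /tmp/app/config directory is created if it does not exist:
  await fs.outputJson('/tmp/app/config/settings.json', { port: 8080 }, { spaces: 2 })

  const cfg = await fs.readJson('/tmp/app/config/settings.json')
  console.log(cfg.port) // 8080
}

saveConfig().catch(console.error)
```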

14 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/mkdirs/index.js (generated, vendored, executable file)
@@ -0,0 +1,14 @@
'use strict'
const u = require('universalify').fromPromise
const { makeDir: _makeDir, makeDirSync } = require('./make-dir')
const makeDir = u(_makeDir)

module.exports = {
  mkdirs: makeDir,
  mkdirsSync: makeDirSync,
  // alias
  mkdirp: makeDir,
  mkdirpSync: makeDirSync,
  ensureDir: makeDir,
  ensureDirSync: makeDirSync
}

27 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/mkdirs/make-dir.js (generated, vendored, executable file)
@@ -0,0 +1,27 @@
'use strict'
const fs = require('../fs')
const { checkPath } = require('./utils')

const getMode = options => {
  const defaults = { mode: 0o777 }
  if (typeof options === 'number') return options
  return ({ ...defaults, ...options }).mode
}

module.exports.makeDir = async (dir, options) => {
  checkPath(dir)

  return fs.mkdir(dir, {
    mode: getMode(options),
    recursive: true
  })
}

module.exports.makeDirSync = (dir, options) => {
  checkPath(dir)

  return fs.mkdirSync(dir, {
    mode: getMode(options),
    recursive: true
  })
}
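
Because of `getMode` above, the exported `ensureDir`/`mkdirs` accept either a bare mode number or an options object. A minimal sketch with hypothetical paths:

```js
const fs = require('fs-extra')

async function makeDirs () {
  // Equivalent ways to request mode 0o775 for the created directories:
  await fs.ensureDir('/tmp/deep/nested/dir', 0o775)
  await fs.ensureDir('/tmp/other/nested/dir', { mode: 0o775 })
}

makeDirs().catch(console.error)
```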

21 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/mkdirs/utils.js (generated, vendored, executable file)
@@ -0,0 +1,21 @@
// Adapted from https://github.com/sindresorhus/make-dir
// Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict'
const path = require('path')

// https://github.com/nodejs/node/issues/8987
// https://github.com/libuv/libuv/pull/1088
module.exports.checkPath = function checkPath (pth) {
  if (process.platform === 'win32') {
    const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, ''))

    if (pathHasInvalidWinCharacters) {
      const error = new Error(`Path contains invalid characters: ${pth}`)
      error.code = 'EINVAL'
      throw error
    }
  }
}

7 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/move/index.js (generated, vendored, executable file)
@@ -0,0 +1,7 @@
'use strict'

const u = require('universalify').fromCallback
module.exports = {
  move: u(require('./move')),
  moveSync: require('./move-sync')
}

54 mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/move/move-sync.js (generated, vendored, executable file)
@@ -0,0 +1,54 @@
'use strict'

const fs = require('graceful-fs')
const path = require('path')
const copySync = require('../copy').copySync
const removeSync = require('../remove').removeSync
const mkdirpSync = require('../mkdirs').mkdirpSync
const stat = require('../util/stat')

function moveSync (src, dest, opts) {
  opts = opts || {}
  const overwrite = opts.overwrite || opts.clobber || false

  const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts)
  stat.checkParentPathsSync(src, srcStat, dest, 'move')
  if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest))
  return doRename(src, dest, overwrite, isChangingCase)
}

function isParentRoot (dest) {
  const parent = path.dirname(dest)
  const parsedPath = path.parse(parent)
  return parsedPath.root === parent
}

function doRename (src, dest, overwrite, isChangingCase) {
  if (isChangingCase) return rename(src, dest, overwrite)
  if (overwrite) {
    removeSync(dest)
    return rename(src, dest, overwrite)
  }
  if (fs.existsSync(dest)) throw new Error('dest already exists.')
  return rename(src, dest, overwrite)
}

function rename (src, dest, overwrite) {
  try {
    fs.renameSync(src, dest)
  } catch (err) {
    if (err.code !== 'EXDEV') throw err
    return moveAcrossDevice(src, dest, overwrite)
  }
}

function moveAcrossDevice (src, dest, overwrite) {
  const opts = {
    overwrite,
    errorOnExist: true
  }
  copySync(src, dest, opts)
  return removeSync(src)
}

module.exports = moveSync
|
||||
75
mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/move/move.js
generated
vendored
Executable file
@ -0,0 +1,75 @@
'use strict'

const fs = require('graceful-fs')
const path = require('path')
const copy = require('../copy').copy
const remove = require('../remove').remove
const mkdirp = require('../mkdirs').mkdirp
const pathExists = require('../path-exists').pathExists
const stat = require('../util/stat')

function move (src, dest, opts, cb) {
  if (typeof opts === 'function') {
    cb = opts
    opts = {}
  }

  opts = opts || {}

  const overwrite = opts.overwrite || opts.clobber || false

  stat.checkPaths(src, dest, 'move', opts, (err, stats) => {
    if (err) return cb(err)
    const { srcStat, isChangingCase = false } = stats
    stat.checkParentPaths(src, srcStat, dest, 'move', err => {
      if (err) return cb(err)
      if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb)
      mkdirp(path.dirname(dest), err => {
        if (err) return cb(err)
        return doRename(src, dest, overwrite, isChangingCase, cb)
      })
    })
  })
}

function isParentRoot (dest) {
  const parent = path.dirname(dest)
  const parsedPath = path.parse(parent)
  return parsedPath.root === parent
}

function doRename (src, dest, overwrite, isChangingCase, cb) {
  if (isChangingCase) return rename(src, dest, overwrite, cb)
  if (overwrite) {
    return remove(dest, err => {
      if (err) return cb(err)
      return rename(src, dest, overwrite, cb)
    })
  }
  pathExists(dest, (err, destExists) => {
    if (err) return cb(err)
    if (destExists) return cb(new Error('dest already exists.'))
    return rename(src, dest, overwrite, cb)
  })
}

function rename (src, dest, overwrite, cb) {
  fs.rename(src, dest, err => {
    if (!err) return cb()
    if (err.code !== 'EXDEV') return cb(err)
    return moveAcrossDevice(src, dest, overwrite, cb)
  })
}

function moveAcrossDevice (src, dest, overwrite, cb) {
  const opts = {
    overwrite,
    errorOnExist: true
  }
  copy(src, dest, opts, err => {
    if (err) return cb(err)
    return remove(src, cb)
  })
}

module.exports = move
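The `rename` helper above is what makes `move` work across devices: a plain `fs.rename` fails with `EXDEV` when source and destination sit on different mount points, and the code then falls back to copy-plus-remove. A simplified standalone sketch of the same pattern (single files only; the real code uses fs-extra's recursive `copy` and `remove` so directories work too):

```js
const fs = require('graceful-fs')

// Same EXDEV fallback idea as rename()/moveAcrossDevice() above:
// try the cheap rename first, copy + delete only on EXDEV.
function naiveMove (src, dest, cb) {
  fs.rename(src, dest, err => {
    if (!err) return cb()
    if (err.code !== 'EXDEV') return cb(err)
    // cross-device: fall back to a full copy followed by unlink
    fs.copyFile(src, dest, copyErr => {
      if (copyErr) return cb(copyErr)
      fs.unlink(src, cb)
    })
  })
}
```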
40
mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/output-file/index.js
generated
vendored
Executable file
@ -0,0 +1,40 @@
'use strict'

const u = require('universalify').fromCallback
const fs = require('graceful-fs')
const path = require('path')
const mkdir = require('../mkdirs')
const pathExists = require('../path-exists').pathExists

function outputFile (file, data, encoding, callback) {
  if (typeof encoding === 'function') {
    callback = encoding
    encoding = 'utf8'
  }

  const dir = path.dirname(file)
  pathExists(dir, (err, itDoes) => {
    if (err) return callback(err)
    if (itDoes) return fs.writeFile(file, data, encoding, callback)

    mkdir.mkdirs(dir, err => {
      if (err) return callback(err)

      fs.writeFile(file, data, encoding, callback)
    })
  })
}

function outputFileSync (file, ...args) {
  const dir = path.dirname(file)
  if (fs.existsSync(dir)) {
    return fs.writeFileSync(file, ...args)
  }
  mkdir.mkdirsSync(dir)
  fs.writeFileSync(file, ...args)
}

module.exports = {
  outputFile: u(outputFile),
  outputFileSync
}
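`outputFile` differs from `fs.writeFile` only in that missing parent directories are created first. Usage sketch (hypothetical path):

```js
const fse = require('fs-extra')

// '/tmp/this/path/does/not/exist' is created before the write happens
fse.outputFile('/tmp/this/path/does/not/exist/file.txt', 'hello!')
  .then(() => fse.readFile('/tmp/this/path/does/not/exist/file.txt', 'utf8'))
  .then(data => console.log(data)) // -> hello!
  .catch(err => console.error(err))
```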
12
mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/path-exists/index.js
generated
vendored
Executable file
@ -0,0 +1,12 @@
'use strict'
const u = require('universalify').fromPromise
const fs = require('../fs')

function pathExists (path) {
  return fs.access(path).then(() => true).catch(() => false)
}

module.exports = {
  pathExists: u(pathExists),
  pathExistsSync: fs.existsSync
}
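Because the promise resolves to a boolean instead of rejecting, `pathExists` spares callers the try/catch around `fs.access`. Sketch:

```js
const fse = require('fs-extra')

fse.pathExists('/etc/hosts')
  .then(exists => console.log(exists)) // true or false; never rejects for a missing path
```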
22
mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/remove/index.js
generated
vendored
Executable file
@ -0,0 +1,22 @@
'use strict'

const fs = require('graceful-fs')
const u = require('universalify').fromCallback
const rimraf = require('./rimraf')

function remove (path, callback) {
  // Node 14.14.0+
  if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback)
  rimraf(path, callback)
}

function removeSync (path) {
  // Node 14.14.0+
  if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true })
  rimraf.sync(path)
}

module.exports = {
  remove: u(remove),
  removeSync
}
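On Node 14.14.0+ this delegates to the built-in recursive `fs.rm`; older runtimes fall through to the bundled rimraf below. In both cases a missing path is not an error. Sketch (hypothetical path):

```js
const fse = require('fs-extra')

// Removes a file or a whole directory tree; resolves even if the path is already gone.
fse.remove('/tmp/some/dir/that/may/not/exist')
  .then(() => console.log('gone'))
  .catch(err => console.error(err))
```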
302
mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/remove/rimraf.js
generated
vendored
Executable file
@ -0,0 +1,302 @@
'use strict'

const fs = require('graceful-fs')
const path = require('path')
const assert = require('assert')

const isWindows = (process.platform === 'win32')

function defaults (options) {
  const methods = [
    'unlink',
    'chmod',
    'stat',
    'lstat',
    'rmdir',
    'readdir'
  ]
  methods.forEach(m => {
    options[m] = options[m] || fs[m]
    m = m + 'Sync'
    options[m] = options[m] || fs[m]
  })

  options.maxBusyTries = options.maxBusyTries || 3
}

function rimraf (p, options, cb) {
  let busyTries = 0

  if (typeof options === 'function') {
    cb = options
    options = {}
  }

  assert(p, 'rimraf: missing path')
  assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')
  assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required')
  assert(options, 'rimraf: invalid options argument provided')
  assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')

  defaults(options)

  rimraf_(p, options, function CB (er) {
    if (er) {
      if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') &&
          busyTries < options.maxBusyTries) {
        busyTries++
        const time = busyTries * 100
        // try again, with the same exact callback as this one.
        return setTimeout(() => rimraf_(p, options, CB), time)
      }

      // already gone
      if (er.code === 'ENOENT') er = null
    }

    cb(er)
  })
}

// Two possible strategies.
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong. However, there
// are likely far more normal files in the world than directories. This
// is based on the assumption that the average number of files per
// directory is >= 1.
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow. But until then, YAGNI.
function rimraf_ (p, options, cb) {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  // sunos lets the root user unlink directories, which is... weird.
  // so we have to lstat here and make sure it's not a dir.
  options.lstat(p, (er, st) => {
    if (er && er.code === 'ENOENT') {
      return cb(null)
    }

    // Windows can EPERM on stat. Life is suffering.
    if (er && er.code === 'EPERM' && isWindows) {
      return fixWinEPERM(p, options, er, cb)
    }

    if (st && st.isDirectory()) {
      return rmdir(p, options, er, cb)
    }

    options.unlink(p, er => {
      if (er) {
        if (er.code === 'ENOENT') {
          return cb(null)
        }
        if (er.code === 'EPERM') {
          return (isWindows)
            ? fixWinEPERM(p, options, er, cb)
            : rmdir(p, options, er, cb)
        }
        if (er.code === 'EISDIR') {
          return rmdir(p, options, er, cb)
        }
      }
      return cb(er)
    })
  })
}

function fixWinEPERM (p, options, er, cb) {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  options.chmod(p, 0o666, er2 => {
    if (er2) {
      cb(er2.code === 'ENOENT' ? null : er)
    } else {
      options.stat(p, (er3, stats) => {
        if (er3) {
          cb(er3.code === 'ENOENT' ? null : er)
        } else if (stats.isDirectory()) {
          rmdir(p, options, er, cb)
        } else {
          options.unlink(p, cb)
        }
      })
    }
  })
}

function fixWinEPERMSync (p, options, er) {
  let stats

  assert(p)
  assert(options)

  try {
    options.chmodSync(p, 0o666)
  } catch (er2) {
    if (er2.code === 'ENOENT') {
      return
    } else {
      throw er
    }
  }

  try {
    stats = options.statSync(p)
  } catch (er3) {
    if (er3.code === 'ENOENT') {
      return
    } else {
      throw er
    }
  }

  if (stats.isDirectory()) {
    rmdirSync(p, options, er)
  } else {
    options.unlinkSync(p)
  }
}

function rmdir (p, options, originalEr, cb) {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
  // if we guessed wrong, and it's not a directory, then
  // raise the original error.
  options.rmdir(p, er => {
    if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) {
      rmkids(p, options, cb)
    } else if (er && er.code === 'ENOTDIR') {
      cb(originalEr)
    } else {
      cb(er)
    }
  })
}

function rmkids (p, options, cb) {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  options.readdir(p, (er, files) => {
    if (er) return cb(er)

    let n = files.length
    let errState

    if (n === 0) return options.rmdir(p, cb)

    files.forEach(f => {
      rimraf(path.join(p, f), options, er => {
        if (errState) {
          return
        }
        if (er) return cb(errState = er)
        if (--n === 0) {
          options.rmdir(p, cb)
        }
      })
    })
  })
}

// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
function rimrafSync (p, options) {
  let st

  options = options || {}
  defaults(options)

  assert(p, 'rimraf: missing path')
  assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')
  assert(options, 'rimraf: missing options')
  assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')

  try {
    st = options.lstatSync(p)
  } catch (er) {
    if (er.code === 'ENOENT') {
      return
    }

    // Windows can EPERM on stat. Life is suffering.
    if (er.code === 'EPERM' && isWindows) {
      fixWinEPERMSync(p, options, er)
    }
  }

  try {
    // sunos lets the root user unlink directories, which is... weird.
    if (st && st.isDirectory()) {
      rmdirSync(p, options, null)
    } else {
      options.unlinkSync(p)
    }
  } catch (er) {
    if (er.code === 'ENOENT') {
      return
    } else if (er.code === 'EPERM') {
      return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
    } else if (er.code !== 'EISDIR') {
      throw er
    }
    rmdirSync(p, options, er)
  }
}

function rmdirSync (p, options, originalEr) {
  assert(p)
  assert(options)

  try {
    options.rmdirSync(p)
  } catch (er) {
    if (er.code === 'ENOTDIR') {
      throw originalEr
    } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') {
      rmkidsSync(p, options)
    } else if (er.code !== 'ENOENT') {
      throw er
    }
  }
}

function rmkidsSync (p, options) {
  assert(p)
  assert(options)
  options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))

  if (isWindows) {
    // We only end up here once we got ENOTEMPTY at least once, and
    // at this point, we are guaranteed to have removed all the kids.
    // So, we know that it won't be ENOENT or ENOTDIR or anything else.
    // try really hard to delete stuff on windows, because it has a
    // PROFOUNDLY annoying habit of not closing handles promptly when
    // files are deleted, resulting in spurious ENOTEMPTY errors.
    const startTime = Date.now()
    do {
      try {
        const ret = options.rmdirSync(p, options)
        return ret
      } catch {}
    } while (Date.now() - startTime < 500) // give up after 500ms
  } else {
    const ret = options.rmdirSync(p, options)
    return ret
  }
}

module.exports = rimraf
rimraf.sync = rimrafSync
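Every fs call above goes through the `options` object (see `defaults`), and `maxBusyTries` bounds the retry loop for `EBUSY`/`ENOTEMPTY`/`EPERM` with a linear 100 ms backoff. A sketch raising the retry budget (hypothetical path):

```js
const rimraf = require('./rimraf')

// Allow up to 10 retries (100 ms, 200 ms, ... backoff) for EBUSY/ENOTEMPTY/EPERM,
// which mostly matters on Windows where handles close lazily.
rimraf('/tmp/busy-dir', { maxBusyTries: 10 }, err => {
  if (err) return console.error(err)
  console.log('removed')
})
```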
154
mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/util/stat.js
generated
vendored
Executable file
@ -0,0 +1,154 @@
'use strict'

const fs = require('../fs')
const path = require('path')
const util = require('util')

function getStats (src, dest, opts) {
  const statFunc = opts.dereference
    ? (file) => fs.stat(file, { bigint: true })
    : (file) => fs.lstat(file, { bigint: true })
  return Promise.all([
    statFunc(src),
    statFunc(dest).catch(err => {
      if (err.code === 'ENOENT') return null
      throw err
    })
  ]).then(([srcStat, destStat]) => ({ srcStat, destStat }))
}

function getStatsSync (src, dest, opts) {
  let destStat
  const statFunc = opts.dereference
    ? (file) => fs.statSync(file, { bigint: true })
    : (file) => fs.lstatSync(file, { bigint: true })
  const srcStat = statFunc(src)
  try {
    destStat = statFunc(dest)
  } catch (err) {
    if (err.code === 'ENOENT') return { srcStat, destStat: null }
    throw err
  }
  return { srcStat, destStat }
}

function checkPaths (src, dest, funcName, opts, cb) {
  util.callbackify(getStats)(src, dest, opts, (err, stats) => {
    if (err) return cb(err)
    const { srcStat, destStat } = stats

    if (destStat) {
      if (areIdentical(srcStat, destStat)) {
        const srcBaseName = path.basename(src)
        const destBaseName = path.basename(dest)
        if (funcName === 'move' &&
          srcBaseName !== destBaseName &&
          srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {
          return cb(null, { srcStat, destStat, isChangingCase: true })
        }
        return cb(new Error('Source and destination must not be the same.'))
      }
      if (srcStat.isDirectory() && !destStat.isDirectory()) {
        return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))
      }
      if (!srcStat.isDirectory() && destStat.isDirectory()) {
        return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`))
      }
    }

    if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
      return cb(new Error(errMsg(src, dest, funcName)))
    }
    return cb(null, { srcStat, destStat })
  })
}

function checkPathsSync (src, dest, funcName, opts) {
  const { srcStat, destStat } = getStatsSync(src, dest, opts)

  if (destStat) {
    if (areIdentical(srcStat, destStat)) {
      const srcBaseName = path.basename(src)
      const destBaseName = path.basename(dest)
      if (funcName === 'move' &&
        srcBaseName !== destBaseName &&
        srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {
        return { srcStat, destStat, isChangingCase: true }
      }
      throw new Error('Source and destination must not be the same.')
    }
    if (srcStat.isDirectory() && !destStat.isDirectory()) {
      throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
    }
    if (!srcStat.isDirectory() && destStat.isDirectory()) {
      throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)
    }
  }

  if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
    throw new Error(errMsg(src, dest, funcName))
  }
  return { srcStat, destStat }
}

// recursively check if dest parent is a subdirectory of src.
// It works for all file types including symlinks since it
// checks the src and dest inodes. It starts from the deepest
// parent and stops once it reaches the src parent or the root path.
function checkParentPaths (src, srcStat, dest, funcName, cb) {
  const srcParent = path.resolve(path.dirname(src))
  const destParent = path.resolve(path.dirname(dest))
  if (destParent === srcParent || destParent === path.parse(destParent).root) return cb()
  fs.stat(destParent, { bigint: true }, (err, destStat) => {
    if (err) {
      if (err.code === 'ENOENT') return cb()
      return cb(err)
    }
    if (areIdentical(srcStat, destStat)) {
      return cb(new Error(errMsg(src, dest, funcName)))
    }
    return checkParentPaths(src, srcStat, destParent, funcName, cb)
  })
}

function checkParentPathsSync (src, srcStat, dest, funcName) {
  const srcParent = path.resolve(path.dirname(src))
  const destParent = path.resolve(path.dirname(dest))
  if (destParent === srcParent || destParent === path.parse(destParent).root) return
  let destStat
  try {
    destStat = fs.statSync(destParent, { bigint: true })
  } catch (err) {
    if (err.code === 'ENOENT') return
    throw err
  }
  if (areIdentical(srcStat, destStat)) {
    throw new Error(errMsg(src, dest, funcName))
  }
  return checkParentPathsSync(src, srcStat, destParent, funcName)
}

function areIdentical (srcStat, destStat) {
  return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev
}

// return true if dest is a subdir of src, otherwise false.
// It only checks the path strings.
function isSrcSubdir (src, dest) {
  const srcArr = path.resolve(src).split(path.sep).filter(i => i)
  const destArr = path.resolve(dest).split(path.sep).filter(i => i)
  return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true)
}

function errMsg (src, dest, funcName) {
  return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.`
}

module.exports = {
  checkPaths,
  checkPathsSync,
  checkParentPaths,
  checkParentPathsSync,
  isSrcSubdir,
  areIdentical
}
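`isSrcSubdir` compares resolved path segments only, which is what ultimately produces the `Cannot move '...' to a subdirectory of itself` error above. For example:

```js
const { isSrcSubdir } = require('./stat')

console.log(isSrcSubdir('/a/b', '/a/b/c')) // true  -> move/copy is refused
console.log(isSrcSubdir('/a/b', '/a/bc'))  // false -> different segment, allowed
console.log(isSrcSubdir('/a/b', '/a'))     // false -> dest is a parent, allowed
```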
26
mc_test/node_modules/dmg-builder/node_modules/fs-extra/lib/util/utimes.js
generated
vendored
Executable file
@ -0,0 +1,26 @@
'use strict'

const fs = require('graceful-fs')

function utimesMillis (path, atime, mtime, callback) {
  // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)
  fs.open(path, 'r+', (err, fd) => {
    if (err) return callback(err)
    fs.futimes(fd, atime, mtime, futimesErr => {
      fs.close(fd, closeErr => {
        if (callback) callback(futimesErr || closeErr)
      })
    })
  })
}

function utimesMillisSync (path, atime, mtime) {
  const fd = fs.openSync(path, 'r+')
  fs.futimesSync(fd, atime, mtime)
  return fs.closeSync(fd)
}

module.exports = {
  utimesMillis,
  utimesMillisSync
}
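The commented-out `HAS_MILLIS_RES` guard suggests why this goes through a file descriptor: `fs.futimes` keeps sub-second precision where a path-based `fs.utimes` may truncate on some platforms. Usage sketch (hypothetical path):

```js
const { utimesMillis } = require('./utimes')

const now = new Date()
utimesMillis('/tmp/some-file', now, now, err => {
  if (err) return console.error(err)
  console.log('timestamps updated')
})
```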
67
mc_test/node_modules/dmg-builder/node_modules/fs-extra/package.json
generated
vendored
Executable file
@ -0,0 +1,67 @@
{
  "name": "fs-extra",
  "version": "10.1.0",
  "description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. Such as recursive mkdir, copy, and remove.",
  "engines": {
    "node": ">=12"
  },
  "homepage": "https://github.com/jprichardson/node-fs-extra",
  "repository": {
    "type": "git",
    "url": "https://github.com/jprichardson/node-fs-extra"
  },
  "keywords": [
    "fs",
    "file",
    "file system",
    "copy",
    "directory",
    "extra",
    "mkdirp",
    "mkdir",
    "mkdirs",
    "recursive",
    "json",
    "read",
    "write",
    "extra",
    "delete",
    "remove",
    "touch",
    "create",
    "text",
    "output",
    "move",
    "promise"
  ],
  "author": "JP Richardson <jprichardson@gmail.com>",
  "license": "MIT",
  "dependencies": {
    "graceful-fs": "^4.2.0",
    "jsonfile": "^6.0.1",
    "universalify": "^2.0.0"
  },
  "devDependencies": {
    "at-least-node": "^1.0.0",
    "klaw": "^2.1.1",
    "klaw-sync": "^3.0.2",
    "minimist": "^1.1.1",
    "mocha": "^5.0.5",
    "nyc": "^15.0.0",
    "proxyquire": "^2.0.1",
    "read-dir-files": "^0.1.1",
    "standard": "^16.0.3"
  },
  "main": "./lib/index.js",
  "files": [
    "lib/",
    "!lib/**/__tests__/"
  ],
  "scripts": {
    "lint": "standard",
    "test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha",
    "test": "npm run lint && npm run unit",
    "unit": "nyc node test.js"
  },
  "sideEffects": false
}
15
mc_test/node_modules/dmg-builder/node_modules/jsonfile/LICENSE
generated
vendored
Executable file
@ -0,0 +1,15 @@
(The MIT License)

Copyright (c) 2012-2015, JP Richardson <jprichardson@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
230
mc_test/node_modules/dmg-builder/node_modules/jsonfile/README.md
generated
vendored
Executable file
@ -0,0 +1,230 @@
Node.js - jsonfile
================

Easily read/write JSON files in Node.js. _Note: this module cannot be used in the browser._

[](https://www.npmjs.org/package/jsonfile)
[](https://github.com/jprichardson/node-jsonfile/actions?query=branch%3Amaster)
[](https://ci.appveyor.com/project/jprichardson/node-jsonfile/branch/master)

<a href="https://github.com/feross/standard"><img src="https://cdn.rawgit.com/feross/standard/master/sticker.svg" alt="Standard JavaScript" width="100"></a>

Why?
----

Writing `JSON.stringify()` and then `fs.writeFile()` and `JSON.parse()` with `fs.readFile()` enclosed in `try/catch` blocks became annoying.



Installation
------------

    npm install --save jsonfile



API
---

* [`readFile(filename, [options], callback)`](#readfilefilename-options-callback)
* [`readFileSync(filename, [options])`](#readfilesyncfilename-options)
* [`writeFile(filename, obj, [options], callback)`](#writefilefilename-obj-options-callback)
* [`writeFileSync(filename, obj, [options])`](#writefilesyncfilename-obj-options)

----

### readFile(filename, [options], callback)

`options` (`object`, default `undefined`): Pass in any [`fs.readFile`](https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
- `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback.
  If `false`, returns `null` for the object.

```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
jsonfile.readFile(file, function (err, obj) {
  if (err) console.error(err)
  console.dir(obj)
})
```

You can also use this method with promises. The `readFile` method will return a promise if you do not pass a callback function.

```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'
jsonfile.readFile(file)
  .then(obj => console.dir(obj))
  .catch(error => console.error(error))
```

----

### readFileSync(filename, [options])

`options` (`object`, default `undefined`): Pass in any [`fs.readFileSync`](https://nodejs.org/api/fs.html#fs_fs_readfilesync_path_options) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
- `throws` (`boolean`, default: `true`). If an error is encountered reading or parsing the file, throw the error. If `false`, returns `null` for the object.

```js
const jsonfile = require('jsonfile')
const file = '/tmp/data.json'

console.dir(jsonfile.readFileSync(file))
```

----

### writeFile(filename, obj, [options], callback)

`options`: Pass in any [`fs.writeFile`](https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`, or override `EOL` string or set `finalEOL` flag as `false` to not save the file with `EOL` at the end.

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFile(file, obj, function (err) {
  if (err) console.error(err)
})
```
Or use with promises as follows:

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFile(file, obj)
  .then(res => {
    console.log('Write complete')
  })
  .catch(error => console.error(error))
```


**formatting with spaces:**

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) {
  if (err) console.error(err)
})
```

**overriding EOL:**

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFile(file, obj, { spaces: 2, EOL: '\r\n' }, function (err) {
  if (err) console.error(err)
})
```


**disabling the EOL at the end of file:**

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFile(file, obj, { spaces: 2, finalEOL: false }, function (err) {
  if (err) console.log(err)
})
```

**appending to an existing JSON file:**

You can use `fs.writeFile` option `{ flag: 'a' }` to achieve this.

```js
const jsonfile = require('jsonfile')

const file = '/tmp/mayAlreadyExistedData.json'
const obj = { name: 'JP' }

jsonfile.writeFile(file, obj, { flag: 'a' }, function (err) {
  if (err) console.error(err)
})
```

----

### writeFileSync(filename, obj, [options])

`options`: Pass in any [`fs.writeFileSync`](https://nodejs.org/api/fs.html#fs_fs_writefilesync_file_data_options) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`, or override `EOL` string or set `finalEOL` flag as `false` to not save the file with `EOL` at the end.

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFileSync(file, obj)
```

**formatting with spaces:**

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFileSync(file, obj, { spaces: 2 })
```

**overriding EOL:**

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFileSync(file, obj, { spaces: 2, EOL: '\r\n' })
```

**disabling the EOL at the end of file:**

```js
const jsonfile = require('jsonfile')

const file = '/tmp/data.json'
const obj = { name: 'JP' }

jsonfile.writeFileSync(file, obj, { spaces: 2, finalEOL: false })
```

**appending to an existing JSON file:**

You can use `fs.writeFileSync` option `{ flag: 'a' }` to achieve this.

```js
const jsonfile = require('jsonfile')

const file = '/tmp/mayAlreadyExistedData.json'
const obj = { name: 'JP' }

jsonfile.writeFileSync(file, obj, { flag: 'a' })
```

License
-------

(MIT License)

Copyright 2012-2016, JP Richardson <jprichardson@gmail.com>
88
mc_test/node_modules/dmg-builder/node_modules/jsonfile/index.js
generated
vendored
Executable file
@ -0,0 +1,88 @@
let _fs
try {
  _fs = require('graceful-fs')
} catch (_) {
  _fs = require('fs')
}
const universalify = require('universalify')
const { stringify, stripBom } = require('./utils')

async function _readFile (file, options = {}) {
  if (typeof options === 'string') {
    options = { encoding: options }
  }

  const fs = options.fs || _fs

  const shouldThrow = 'throws' in options ? options.throws : true

  let data = await universalify.fromCallback(fs.readFile)(file, options)

  data = stripBom(data)

  let obj
  try {
    obj = JSON.parse(data, options ? options.reviver : null)
  } catch (err) {
    if (shouldThrow) {
      err.message = `${file}: ${err.message}`
      throw err
    } else {
      return null
    }
  }

  return obj
}

const readFile = universalify.fromPromise(_readFile)

function readFileSync (file, options = {}) {
  if (typeof options === 'string') {
    options = { encoding: options }
  }

  const fs = options.fs || _fs

  const shouldThrow = 'throws' in options ? options.throws : true

  try {
    let content = fs.readFileSync(file, options)
    content = stripBom(content)
    return JSON.parse(content, options.reviver)
  } catch (err) {
    if (shouldThrow) {
      err.message = `${file}: ${err.message}`
      throw err
    } else {
      return null
    }
  }
}

async function _writeFile (file, obj, options = {}) {
  const fs = options.fs || _fs

  const str = stringify(obj, options)

  await universalify.fromCallback(fs.writeFile)(file, str, options)
}

const writeFile = universalify.fromPromise(_writeFile)

function writeFileSync (file, obj, options = {}) {
  const fs = options.fs || _fs

  const str = stringify(obj, options)
  // not sure if fs.writeFileSync returns anything, but just in case
  return fs.writeFileSync(file, str, options)
}

// NOTE: do not change this export format; required for ESM compat
// see https://github.com/jprichardson/node-jsonfile/pull/162 for details
module.exports = {
  readFile,
  readFileSync,
  writeFile,
  writeFileSync
}
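`shouldThrow` defaults to `true`; passing `throws: false` makes both readers swallow read and parse errors and return `null` instead. Sketch (hypothetical file):

```js
const jsonfile = require('./index')

// With throws: false, a missing or malformed file yields null rather than an exception.
const obj = jsonfile.readFileSync('/tmp/not-really-json.txt', { throws: false })
console.log(obj) // -> null if the file is missing or unparsable
```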
40
mc_test/node_modules/dmg-builder/node_modules/jsonfile/package.json
generated
vendored
Executable file
@ -0,0 +1,40 @@
{
  "name": "jsonfile",
  "version": "6.2.0",
  "description": "Easily read/write JSON files.",
  "repository": {
    "type": "git",
    "url": "git@github.com:jprichardson/node-jsonfile.git"
  },
  "keywords": [
    "read",
    "write",
    "file",
    "json",
    "fs",
    "fs-extra"
  ],
  "author": "JP Richardson <jprichardson@gmail.com>",
  "license": "MIT",
  "dependencies": {
    "universalify": "^2.0.0"
  },
  "optionalDependencies": {
    "graceful-fs": "^4.1.6"
  },
  "devDependencies": {
    "mocha": "^8.2.0",
    "rimraf": "^2.4.0",
    "standard": "^16.0.1"
  },
  "main": "index.js",
  "files": [
    "index.js",
    "utils.js"
  ],
  "scripts": {
    "lint": "standard",
    "test": "npm run lint && npm run unit",
    "unit": "mocha"
  }
}
14
mc_test/node_modules/dmg-builder/node_modules/jsonfile/utils.js
generated
vendored
Executable file
@ -0,0 +1,14 @@
function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) {
  const EOF = finalEOL ? EOL : ''
  const str = JSON.stringify(obj, replacer, spaces)

  return str.replace(/\n/g, EOL) + EOF
}

function stripBom (content) {
  // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
  if (Buffer.isBuffer(content)) content = content.toString('utf8')
  return content.replace(/^\uFEFF/, '')
}

module.exports = { stringify, stripBom }
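`stringify` rewrites every newline emitted by `JSON.stringify` to the configured `EOL`, then appends one final `EOL` unless `finalEOL` is disabled. For example:

```js
const { stringify } = require('./utils')

stringify({ a: 1 }, { spaces: 2 })
// -> '{\n  "a": 1\n}\n'

stringify({ a: 1 }, { spaces: 2, EOL: '\r\n', finalEOL: false })
// -> '{\r\n  "a": 1\r\n}'
```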
20
mc_test/node_modules/dmg-builder/node_modules/universalify/LICENSE
generated
vendored
Executable file
@ -0,0 +1,20 @@
(The MIT License)

Copyright (c) 2017, Ryan Zimmerman <opensrc@ryanzim.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the 'Software'), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
76
mc_test/node_modules/dmg-builder/node_modules/universalify/README.md
generated
vendored
Executable file
@ -0,0 +1,76 @@
# universalify

Make a callback- or promise-based function support both promises and callbacks.

Uses the native promise implementation.

## Installation

```bash
npm install universalify
```

## API

### `universalify.fromCallback(fn)`

Takes a callback-based function to universalify, and returns the universalified function.

Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with three or more arguments, and does not ensure that the callback is only called once.

```js
function callbackFn (n, cb) {
  setTimeout(() => cb(null, n), 15)
}

const fn = universalify.fromCallback(callbackFn)

// Works with Promises:
fn('Hello World!')
  .then(result => console.log(result)) // -> Hello World!
  .catch(error => console.error(error))

// Works with Callbacks:
fn('Hi!', (error, result) => {
  if (error) return console.error(error)
  console.log(result)
  // -> Hi!
})
```

### `universalify.fromPromise(fn)`

Takes a promise-based function to universalify, and returns the universalified function.

Function must return a valid JS promise. `universalify` does not ensure that a valid promise is returned.

```js
function promiseFn (n) {
  return new Promise(resolve => {
    setTimeout(() => resolve(n), 15)
  })
}

const fn = universalify.fromPromise(promiseFn)

// Works with Promises:
fn('Hello World!')
  .then(result => console.log(result)) // -> Hello World!
  .catch(error => console.error(error))

// Works with Callbacks:
fn('Hi!', (error, result) => {
  if (error) return console.error(error)
  console.log(result)
  // -> Hi!
})
```

## License

MIT
24
mc_test/node_modules/dmg-builder/node_modules/universalify/index.js
generated
vendored
Executable file
@ -0,0 +1,24 @@
'use strict'

exports.fromCallback = function (fn) {
  return Object.defineProperty(function (...args) {
    if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
    else {
      return new Promise((resolve, reject) => {
        args.push((err, res) => (err != null) ? reject(err) : resolve(res))
        fn.apply(this, args)
      })
    }
  }, 'name', { value: fn.name })
}

exports.fromPromise = function (fn) {
  return Object.defineProperty(function (...args) {
    const cb = args[args.length - 1]
    if (typeof cb !== 'function') return fn.apply(this, args)
    else {
      args.pop()
      fn.apply(this, args).then(r => cb(null, r), cb)
    }
  }, 'name', { value: fn.name })
}
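Both wrappers copy the wrapped function's `name` onto the returned function via `Object.defineProperty`, so stack traces and logs keep the original identifier. Sketch:

```js
const universalify = require('./index')

function readConfig (cb) { cb(null, { ok: true }) }

const wrapped = universalify.fromCallback(readConfig)
console.log(wrapped.name) // -> 'readConfig', not 'anonymous'
```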
34
mc_test/node_modules/dmg-builder/node_modules/universalify/package.json
generated
vendored
Executable file
@ -0,0 +1,34 @@
{
  "name": "universalify",
  "version": "2.0.1",
  "description": "Make a callback- or promise-based function support both promises and callbacks.",
  "keywords": [
    "callback",
    "native",
    "promise"
  ],
  "homepage": "https://github.com/RyanZim/universalify#readme",
  "bugs": "https://github.com/RyanZim/universalify/issues",
  "license": "MIT",
  "author": "Ryan Zimmerman <opensrc@ryanzim.com>",
  "files": [
    "index.js"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/RyanZim/universalify.git"
  },
  "scripts": {
    "test": "standard && nyc --reporter text --reporter lcovonly tape test/*.js | colortape"
  },
  "devDependencies": {
    "colortape": "^0.1.2",
    "coveralls": "^3.0.1",
    "nyc": "^15.0.0",
    "standard": "^14.3.1",
    "tape": "^5.0.1"
  },
  "engines": {
    "node": ">= 10.0.0"
  }
}
13
mc_test/node_modules/dmg-builder/out/dmg.d.ts
generated
vendored
Executable file
@ -0,0 +1,13 @@
import { DmgOptions, Target } from "app-builder-lib";
import MacPackager from "app-builder-lib/out/macPackager";
import { Arch } from "builder-util";
export declare class DmgTarget extends Target {
    private readonly packager;
    readonly outDir: string;
    readonly options: DmgOptions;
    constructor(packager: MacPackager, outDir: string);
    build(appPath: string, arch: Arch): Promise<void>;
    private signDmg;
    computeVolumeName(arch: Arch, custom?: string | null): string;
    computeDmgOptions(): Promise<DmgOptions>;
}
322
mc_test/node_modules/dmg-builder/out/dmg.js
generated
vendored
Executable file
@ -0,0 +1,322 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DmgTarget = void 0;
|
||||
const app_builder_lib_1 = require("app-builder-lib");
|
||||
const macCodeSign_1 = require("app-builder-lib/out/codeSign/macCodeSign");
|
||||
const differentialUpdateInfoBuilder_1 = require("app-builder-lib/out/targets/differentialUpdateInfoBuilder");
|
||||
const appBuilder_1 = require("app-builder-lib/out/util/appBuilder");
|
||||
const filename_1 = require("app-builder-lib/out/util/filename");
|
||||
const builder_util_1 = require("builder-util");
|
||||
const fs_1 = require("builder-util/out/fs");
|
||||
const fs_extra_1 = require("fs-extra");
|
||||
const path = require("path");
|
||||
const dmgLicense_1 = require("./dmgLicense");
|
||||
const dmgUtil_1 = require("./dmgUtil");
|
||||
const os_1 = require("os");
|
||||
class DmgTarget extends app_builder_lib_1.Target {
|
||||
constructor(packager, outDir) {
|
||||
super("dmg");
|
||||
this.packager = packager;
|
||||
this.outDir = outDir;
|
||||
this.options = this.packager.config.dmg || Object.create(null);
|
||||
}
|
||||
async build(appPath, arch) {
|
||||
const packager = this.packager;
|
||||
// tslint:disable-next-line:no-invalid-template-strings
|
||||
const artifactName = packager.expandArtifactNamePattern(this.options, "dmg", arch, "${productName}-" + (packager.platformSpecificBuildOptions.bundleShortVersion || "${version}") + "-${arch}.${ext}", true, packager.platformSpecificBuildOptions.defaultArch);
|
||||
const artifactPath = path.join(this.outDir, artifactName);
|
||||
await packager.info.callArtifactBuildStarted({
|
||||
targetPresentableName: "DMG",
|
||||
file: artifactPath,
|
||||
arch,
|
||||
});
|
||||
const volumeName = (0, filename_1.sanitizeFileName)(this.computeVolumeName(arch, this.options.title));
|
||||
const tempDmg = await createStageDmg(await packager.getTempFile(".dmg"), appPath, volumeName);
|
||||
const specification = await this.computeDmgOptions();
|
||||
// https://github.com/electron-userland/electron-builder/issues/2115
|
||||
const backgroundFile = specification.background == null ? null : await transformBackgroundFileIfNeed(specification.background, packager.info.tempDirManager);
|
||||
const finalSize = await computeAssetSize(packager.info.cancellationToken, tempDmg, specification, backgroundFile);
|
||||
const expandingFinalSize = finalSize * 0.1 + finalSize;
|
||||
await (0, builder_util_1.exec)("hdiutil", ["resize", "-size", expandingFinalSize.toString(), tempDmg]);
|
||||
const volumePath = path.join("/Volumes", volumeName);
|
||||
if (await (0, fs_1.exists)(volumePath)) {
|
||||
builder_util_1.log.debug({ volumePath }, "unmounting previous disk image");
|
||||
await (0, dmgUtil_1.detach)(volumePath);
|
||||
}
|
||||
if (!(await (0, dmgUtil_1.attachAndExecute)(tempDmg, true, () => customizeDmg(volumePath, specification, packager, backgroundFile)))) {
|
||||
return;
|
||||
}
|
||||
// dmg file must not exist otherwise hdiutil failed (https://github.com/electron-userland/electron-builder/issues/1308#issuecomment-282847594), so, -ov must be specified
|
||||
const args = ["convert", tempDmg, "-ov", "-format", specification.format, "-o", artifactPath];
|
||||
if (specification.format === "UDZO") {
|
||||
args.push("-imagekey", `zlib-level=${process.env.ELECTRON_BUILDER_COMPRESSION_LEVEL || "9"}`);
|
||||
}
|
||||
await (0, builder_util_1.spawn)("hdiutil", addLogLevel(args));
|
||||
if (this.options.internetEnabled && parseInt((0, os_1.release)().split(".")[0], 10) < 19) {
|
||||
await (0, builder_util_1.exec)("hdiutil", addLogLevel(["internet-enable"]).concat(artifactPath));
|
||||
}
|
||||
const licenseData = await (0, dmgLicense_1.addLicenseToDmg)(packager, artifactPath);
|
||||
if (packager.packagerOptions.effectiveOptionComputed != null) {
|
||||
await packager.packagerOptions.effectiveOptionComputed({ licenseData });
|
||||
}
|
||||
if (this.options.sign === true) {
|
||||
await this.signDmg(artifactPath);
|
||||
}
|
||||
const safeArtifactName = packager.computeSafeArtifactName(artifactName, "dmg");
|
||||
const updateInfo = this.options.writeUpdateInfo === false ? null : await (0, differentialUpdateInfoBuilder_1.createBlockmap)(artifactPath, this, packager, safeArtifactName);
|
||||
await packager.info.callArtifactBuildCompleted({
|
||||
file: artifactPath,
|
||||
safeArtifactName,
|
||||
target: this,
|
||||
arch,
|
||||
packager,
|
||||
isWriteUpdateInfo: updateInfo != null,
|
||||
updateInfo,
|
||||
});
|
||||
}
|
||||
async signDmg(artifactPath) {
|
||||
if (!(0, macCodeSign_1.isSignAllowed)(false)) {
|
||||
return;
|
||||
}
|
||||
const packager = this.packager;
|
||||
const qualifier = packager.platformSpecificBuildOptions.identity;
|
||||
// explicitly disabled if set to null
|
||||
if (qualifier === null) {
|
||||
// macPackager already somehow handle this situation, so, here just return
|
||||
return;
|
||||
}
|
||||
const keychainFile = (await packager.codeSigningInfo.value).keychainFile;
|
||||
const certificateType = "Developer ID Application";
|
||||
let identity = await (0, macCodeSign_1.findIdentity)(certificateType, qualifier, keychainFile);
|
||||
if (identity == null) {
|
||||
identity = await (0, macCodeSign_1.findIdentity)("Mac Developer", qualifier, keychainFile);
|
||||
if (identity == null) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
const args = ["--sign", identity.hash];
|
||||
if (keychainFile != null) {
|
||||
args.push("--keychain", keychainFile);
|
||||
}
|
||||
args.push(artifactPath);
|
||||
await (0, builder_util_1.exec)("codesign", args);
|
||||
}
|
||||
computeVolumeName(arch, custom) {
|
||||
const appInfo = this.packager.appInfo;
|
||||
const shortVersion = this.packager.platformSpecificBuildOptions.bundleShortVersion || appInfo.version;
|
||||
const archString = (0, builder_util_1.getArchSuffix)(arch, this.packager.platformSpecificBuildOptions.defaultArch);
|
||||
if (custom == null) {
|
||||
return `${appInfo.productFilename} ${shortVersion}${archString}`;
|
||||
}
|
||||
return custom
|
||||
.replace(/\${arch}/g, archString)
|
||||
.replace(/\${shortVersion}/g, shortVersion)
|
||||
.replace(/\${version}/g, appInfo.version)
|
||||
.replace(/\${name}/g, appInfo.name)
|
||||
.replace(/\${productName}/g, appInfo.productName);
|
||||
}
|
||||
// public to test
|
||||
async computeDmgOptions() {
|
||||
const packager = this.packager;
|
||||
const specification = { ...this.options };
|
||||
if (specification.icon == null && specification.icon !== null) {
|
||||
specification.icon = await packager.getIconPath();
|
||||
}
|
||||
if (specification.icon != null && (0, builder_util_1.isEmptyOrSpaces)(specification.icon)) {
|
||||
throw new builder_util_1.InvalidConfigurationError("dmg.icon cannot be specified as empty string");
|
||||
}
|
||||
const background = specification.background;
|
||||
if (specification.backgroundColor != null) {
|
||||
if (background != null) {
|
||||
throw new builder_util_1.InvalidConfigurationError("Both dmg.backgroundColor and dmg.background are specified — please set the only one");
|
||||
}
|
||||
}
|
||||
else if (background == null) {
|
||||
specification.background = await (0, dmgUtil_1.computeBackground)(packager);
|
||||
}
|
||||
else {
|
||||
specification.background = path.resolve(packager.info.projectDir, background);
|
||||
}
|
||||
if (specification.format == null) {
|
||||
if (process.env.ELECTRON_BUILDER_COMPRESSION_LEVEL != null) {
|
||||
;
|
||||
specification.format = "UDZO";
|
||||
}
|
||||
else if (packager.compression === "store") {
|
||||
specification.format = "UDRO";
|
||||
}
|
||||
else {
|
||||
specification.format = packager.compression === "maximum" ? "UDBZ" : "UDZO";
|
||||
}
|
||||
}
|
||||
if (specification.contents == null) {
|
||||
specification.contents = [
|
||||
{
|
||||
x: 130,
|
||||
y: 220,
|
||||
},
|
||||
{
|
||||
x: 410,
|
||||
y: 220,
|
||||
type: "link",
|
||||
path: "/Applications",
|
||||
},
|
||||
];
|
||||
}
|
||||
return specification;
|
||||
}
|
||||
}
|
||||
exports.DmgTarget = DmgTarget;
|
||||
async function createStageDmg(tempDmg, appPath, volumeName) {
|
||||
//noinspection SpellCheckingInspection
|
||||
const imageArgs = addLogLevel(["create", "-srcfolder", appPath, "-volname", volumeName, "-anyowners", "-nospotlight", "-format", "UDRW"]);
|
||||
if (builder_util_1.log.isDebugEnabled) {
|
||||
imageArgs.push("-debug");
|
||||
}
|
||||
let filesystem = ["HFS+", "-fsargs", "-c c=64,a=16,e=16"];
|
||||
if (process.arch === "arm64") {
|
||||
// Apple Silicon `hdiutil` dropped support for HFS+, so we force the latest type
|
||||
// https://github.com/electron-userland/electron-builder/issues/4606
|
||||
filesystem = ["APFS"];
|
||||
builder_util_1.log.warn(null, "Detected arm64 process, HFS+ is unavailable. Creating dmg with APFS - supports Mac OSX 10.12+");
|
||||
}
|
||||
imageArgs.push("-fs", ...filesystem);
|
||||
imageArgs.push(tempDmg);
|
||||
// The reason for retrying up to ten times is that hdiutil create in some cases fail to unmount due to "resource busy".
|
||||
// https://github.com/electron-userland/electron-builder/issues/5431
|
||||
await (0, builder_util_1.retry)(() => (0, builder_util_1.spawn)("hdiutil", imageArgs), 5, 1000);
|
||||
return tempDmg;
|
||||
}
|
||||
function addLogLevel(args) {
|
||||
args.push(process.env.DEBUG_DMG === "true" ? "-verbose" : "-quiet");
|
||||
return args;
|
||||
}
|
||||
async function computeAssetSize(cancellationToken, dmgFile, specification, backgroundFile) {
|
||||
const asyncTaskManager = new builder_util_1.AsyncTaskManager(cancellationToken);
|
||||
asyncTaskManager.addTask((0, fs_extra_1.stat)(dmgFile));
|
||||
if (specification.icon != null) {
|
||||
asyncTaskManager.addTask((0, fs_1.statOrNull)(specification.icon));
|
||||
}
|
||||
if (backgroundFile != null) {
|
||||
asyncTaskManager.addTask((0, fs_extra_1.stat)(backgroundFile));
|
||||
}
|
||||
let result = 32 * 1024;
|
||||
for (const stat of await asyncTaskManager.awaitTasks()) {
|
||||
if (stat != null) {
|
||||
result += stat.size;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
async function customizeDmg(volumePath, specification, packager, backgroundFile) {
|
||||
const window = specification.window;
|
||||
const isValidIconTextSize = !!specification.iconTextSize && specification.iconTextSize >= 10 && specification.iconTextSize <= 16;
|
||||
const iconTextSize = isValidIconTextSize ? specification.iconTextSize : 12;
|
||||
const env = {
|
||||
...process.env,
|
||||
volumePath,
|
||||
appFileName: `${packager.appInfo.productFilename}.app`,
|
||||
iconSize: specification.iconSize || 80,
|
||||
iconTextSize,
|
||||
PYTHONIOENCODING: "utf8",
|
||||
};
|
||||
if (specification.backgroundColor != null || specification.background == null) {
|
||||
env.backgroundColor = specification.backgroundColor || "#ffffff";
|
||||
if (window != null) {
|
||||
env.windowX = (window.x == null ? 100 : window.x).toString();
|
||||
env.windowY = (window.y == null ? 400 : window.y).toString();
|
||||
env.windowWidth = (window.width || 540).toString();
|
||||
env.windowHeight = (window.height || 380).toString();
|
||||
}
|
||||
}
|
||||
else {
|
||||
delete env.backgroundColor;
|
||||
}
|
||||
const args = ["dmg", "--volume", volumePath];
|
||||
if (specification.icon != null) {
|
||||
args.push("--icon", (await packager.getResource(specification.icon)));
|
||||
}
|
||||
if (backgroundFile != null) {
|
||||
args.push("--background", backgroundFile);
|
||||
}
|
||||
const data = await (0, appBuilder_1.executeAppBuilderAsJson)(args);
|
||||
if (data.backgroundWidth != null) {
|
||||
env.windowWidth = window == null ? null : window.width;
|
||||
env.windowHeight = window == null ? null : window.height;
|
||||
if (env.windowWidth == null) {
|
||||
env.windowWidth = data.backgroundWidth.toString();
|
||||
}
|
||||
if (env.windowHeight == null) {
|
||||
env.windowHeight = data.backgroundHeight.toString();
|
||||
}
|
||||
if (env.windowX == null) {
|
||||
env.windowX = 400;
|
||||
}
|
||||
if (env.windowY == null) {
|
||||
env.windowY = Math.round((1440 - env.windowHeight) / 2).toString();
|
||||
}
|
||||
}
|
||||
Object.assign(env, data);
|
||||
const asyncTaskManager = new builder_util_1.AsyncTaskManager(packager.info.cancellationToken);
|
||||
env.iconLocations = await computeDmgEntries(specification, volumePath, packager, asyncTaskManager);
|
||||
await asyncTaskManager.awaitTasks();
|
||||
const executePython = async (execName) => {
|
||||
let pythonPath = process.env.PYTHON_PATH;
|
||||
if (!pythonPath) {
|
||||
pythonPath = (await (0, builder_util_1.exec)("which", [execName])).trim();
|
||||
}
|
||||
await (0, builder_util_1.exec)(pythonPath, [path.join((0, dmgUtil_1.getDmgVendorPath)(), "dmgbuild/core.py")], {
|
||||
cwd: (0, dmgUtil_1.getDmgVendorPath)(),
|
||||
env,
|
||||
});
|
||||
};
|
||||
try {
|
||||
await executePython("python3");
|
||||
}
|
||||
catch (error) {
|
||||
await executePython("python");
|
||||
}
|
||||
return packager.packagerOptions.effectiveOptionComputed == null || !(await packager.packagerOptions.effectiveOptionComputed({ volumePath, specification, packager }));
|
||||
}
|
||||
async function computeDmgEntries(specification, volumePath, packager, asyncTaskManager) {
|
||||
let result = "";
|
||||
for (const c of specification.contents) {
|
||||
if (c.path != null && c.path.endsWith(".app") && c.type !== "link") {
|
||||
builder_util_1.log.warn({ path: c.path, reason: "actual path to app will be used instead" }, "do not specify path for application");
|
||||
}
|
||||
const entryPath = c.path || `${packager.appInfo.productFilename}.app`;
|
||||
const entryName = c.name || path.basename(entryPath);
|
||||
const escapedEntryName = entryName.replace(/['\\]/g, match => `\\${match}`);
|
||||
if (result.length !== 0) {
|
||||
result += ",\n";
|
||||
}
|
||||
result += `'${escapedEntryName}': (${c.x}, ${c.y})`;
|
||||
if (c.type === "link") {
|
||||
asyncTaskManager.addTask((0, builder_util_1.exec)("ln", ["-s", `/${entryPath.startsWith("/") ? entryPath.substring(1) : entryPath}`, `${volumePath}/${entryName}`]));
|
||||
}
|
||||
// use c.path instead of entryPath (to be sure that this logic is not applied to .app bundle) https://github.com/electron-userland/electron-builder/issues/2147
|
||||
else if (!(0, builder_util_1.isEmptyOrSpaces)(c.path) && (c.type === "file" || c.type === "dir")) {
|
||||
const source = await packager.getResource(c.path);
|
||||
if (source == null) {
|
||||
builder_util_1.log.warn({ entryPath, reason: "doesn't exist" }, "skipped DMG item copying");
|
||||
continue;
|
||||
}
|
||||
const destination = `${volumePath}/${entryName}`;
|
||||
asyncTaskManager.addTask(c.type === "dir" || (await (0, fs_extra_1.stat)(source)).isDirectory() ? (0, fs_1.copyDir)(source, destination) : (0, fs_1.copyFile)(source, destination));
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
async function transformBackgroundFileIfNeed(file, tmpDir) {
|
||||
if (file.endsWith(".tiff") || file.endsWith(".TIFF")) {
|
||||
return file;
|
||||
}
|
||||
const retinaFile = file.replace(/\.([a-z]+)$/, "@2x.$1");
|
||||
if (await (0, fs_1.exists)(retinaFile)) {
|
||||
const tiffFile = await tmpDir.getTempFile({ suffix: ".tiff" });
|
||||
await (0, builder_util_1.exec)("tiffutil", ["-cathidpicheck", file, retinaFile, "-out", tiffFile]);
|
||||
return tiffFile;
|
||||
}
|
||||
return file;
|
||||
}
|
||||
//# sourceMappingURL=dmg.js.map
|
||||
1
mc_test/node_modules/dmg-builder/out/dmg.js.map
generated
vendored
Executable file
1
mc_test/node_modules/dmg-builder/out/dmg.js.map
generated
vendored
Executable file
File diff suppressed because one or more lines are too long
8
mc_test/node_modules/dmg-builder/out/dmgLicense.d.ts
generated
vendored
Executable file
8
mc_test/node_modules/dmg-builder/out/dmgLicense.d.ts
generated
vendored
Executable file
@ -0,0 +1,8 @@
|
||||
import { PlatformPackager } from "app-builder-lib";
|
||||
type LicenseConfig = {
|
||||
$schema: string;
|
||||
body: any[];
|
||||
labels: any[];
|
||||
};
|
||||
export declare function addLicenseToDmg(packager: PlatformPackager<any>, dmgPath: string): Promise<LicenseConfig | null>;
|
||||
export {};
|
||||
48
mc_test/node_modules/dmg-builder/out/dmgLicense.js
generated
vendored
Executable file
48
mc_test/node_modules/dmg-builder/out/dmgLicense.js
generated
vendored
Executable file
@ -0,0 +1,48 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.addLicenseToDmg = void 0;
|
||||
const builder_util_1 = require("builder-util");
|
||||
const js_yaml_1 = require("js-yaml");
|
||||
const license_1 = require("app-builder-lib/out/util/license");
|
||||
const fs_extra_1 = require("fs-extra");
|
||||
const licenseButtons_1 = require("./licenseButtons");
|
||||
const dmg_license_1 = require("dmg-license");
|
||||
async function addLicenseToDmg(packager, dmgPath) {
|
||||
const licenseFiles = await (0, license_1.getLicenseFiles)(packager);
|
||||
if (licenseFiles.length === 0) {
|
||||
return null;
|
||||
}
|
||||
const licenseButtonFiles = await (0, licenseButtons_1.getLicenseButtonsFile)(packager);
|
||||
packager.debugLogger.add("dmg.licenseFiles", licenseFiles);
|
||||
packager.debugLogger.add("dmg.licenseButtons", licenseButtonFiles);
|
||||
const jsonFile = {
|
||||
$schema: "https://github.com/argv-minus-one/dmg-license/raw/master/schema.json",
|
||||
// defaultLang: '',
|
||||
body: [],
|
||||
labels: [],
|
||||
};
|
||||
for (const file of licenseFiles) {
|
||||
jsonFile.body.push({
|
||||
file: file.file,
|
||||
lang: file.langWithRegion.replace("_", "-"),
|
||||
});
|
||||
}
|
||||
for (const button of licenseButtonFiles) {
|
||||
const filepath = button.file;
|
||||
const label = filepath.endsWith(".yml") ? (0, js_yaml_1.load)(await (0, fs_extra_1.readFile)(filepath, "utf-8")) : await (0, fs_extra_1.readJson)(filepath);
|
||||
if (label.description) {
|
||||
// to support original button file format
|
||||
label.message = label.description;
|
||||
delete label.description;
|
||||
}
|
||||
jsonFile.labels.push(Object.assign({
|
||||
lang: button.langWithRegion.replace("_", "-"),
|
||||
}, label));
|
||||
}
|
||||
await (0, dmg_license_1.dmgLicenseFromJSON)(dmgPath, jsonFile, {
|
||||
onNonFatalError: builder_util_1.log.warn.bind(builder_util_1.log),
|
||||
});
|
||||
return jsonFile;
|
||||
}
|
||||
exports.addLicenseToDmg = addLicenseToDmg;
|
||||
//# sourceMappingURL=dmgLicense.js.map
|
||||
1
mc_test/node_modules/dmg-builder/out/dmgLicense.js.map
generated
vendored
Executable file
1
mc_test/node_modules/dmg-builder/out/dmgLicense.js.map
generated
vendored
Executable file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"dmgLicense.js","sourceRoot":"","sources":["../src/dmgLicense.ts"],"names":[],"mappings":";;;AAAA,+CAAkC;AAClC,qCAA8B;AAE9B,8DAAkE;AAClE,uCAA6C;AAC7C,qDAAwD;AACxD,6CAAgD;AAUzC,KAAK,UAAU,eAAe,CAAC,QAA+B,EAAE,OAAe;IACpF,MAAM,YAAY,GAAG,MAAM,IAAA,yBAAe,EAAC,QAAQ,CAAC,CAAA;IACpD,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC9B,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,kBAAkB,GAAG,MAAM,IAAA,sCAAqB,EAAC,QAAQ,CAAC,CAAA;IAChE,QAAQ,CAAC,WAAW,CAAC,GAAG,CAAC,kBAAkB,EAAE,YAAY,CAAC,CAAA;IAC1D,QAAQ,CAAC,WAAW,CAAC,GAAG,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAA;IAElE,MAAM,QAAQ,GAAkB;QAC9B,OAAO,EAAE,sEAAsE;QAC/E,mBAAmB;QACnB,IAAI,EAAE,EAAE;QACR,MAAM,EAAE,EAAE;KACX,CAAA;IAED,KAAK,MAAM,IAAI,IAAI,YAAY,EAAE,CAAC;QAChC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;YACjB,IAAI,EAAE,IAAI,CAAC,IAAI;YACf,IAAI,EAAE,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC;SAC5C,CAAC,CAAA;IACJ,CAAC;IAED,KAAK,MAAM,MAAM,IAAI,kBAAkB,EAAE,CAAC;QACxC,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAA;QAC5B,MAAM,KAAK,GAAG,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAA,cAAI,EAAC,MAAM,IAAA,mBAAQ,EAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,IAAA,mBAAQ,EAAC,QAAQ,CAAC,CAAA;QAC5G,IAAI,KAAK,CAAC,WAAW,EAAE,CAAC;YACtB,yCAAyC;YACzC,KAAK,CAAC,OAAO,GAAG,KAAK,CAAC,WAAW,CAAA;YACjC,OAAO,KAAK,CAAC,WAAW,CAAA;QAC1B,CAAC;QACD,QAAQ,CAAC,MAAM,CAAC,IAAI,CAClB,MAAM,CAAC,MAAM,CACX;YACE,IAAI,EAAE,MAAM,CAAC,cAAc,CAAC,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC;SAC9C,EACD,KAAK,CACN,CACF,CAAA;IACH,CAAC;IAED,MAAM,IAAA,gCAAkB,EAAC,OAAO,EAAE,QAAQ,EAAE;QAC1C,eAAe,EAAE,kBAAG,CAAC,IAAI,CAAC,IAAI,CAAC,kBAAG,CAAC;KACpC,CAAC,CAAA;IAEF,OAAO,QAAQ,CAAA;AACjB,CAAC;AA/CD,0CA+CC","sourcesContent":["import { log } from \"builder-util\"\nimport { load } from \"js-yaml\"\nimport { PlatformPackager } from \"app-builder-lib\"\nimport { getLicenseFiles } from \"app-builder-lib/out/util/license\"\nimport { readFile, readJson } from \"fs-extra\"\nimport { getLicenseButtonsFile } from \"./licenseButtons\"\nimport { dmgLicenseFromJSON } from \"dmg-license\"\n\n// License Specifications\n// https://github.com/argv-minus-one/dmg-license/blob/HEAD/docs/License%20Specifications.md\ntype LicenseConfig = {\n $schema: string\n body: any[]\n labels: any[]\n}\n\nexport async function addLicenseToDmg(packager: PlatformPackager<any>, dmgPath: string): Promise<LicenseConfig | null> {\n const licenseFiles = await getLicenseFiles(packager)\n if (licenseFiles.length === 0) {\n return null\n }\n\n const licenseButtonFiles = await getLicenseButtonsFile(packager)\n packager.debugLogger.add(\"dmg.licenseFiles\", licenseFiles)\n packager.debugLogger.add(\"dmg.licenseButtons\", licenseButtonFiles)\n\n const jsonFile: LicenseConfig = {\n $schema: \"https://github.com/argv-minus-one/dmg-license/raw/master/schema.json\",\n // defaultLang: '',\n body: [],\n labels: [],\n }\n\n for (const file of licenseFiles) {\n jsonFile.body.push({\n file: file.file,\n lang: file.langWithRegion.replace(\"_\", \"-\"),\n })\n }\n\n for (const button of licenseButtonFiles) {\n const filepath = button.file\n const label = filepath.endsWith(\".yml\") ? load(await readFile(filepath, \"utf-8\")) : await readJson(filepath)\n if (label.description) {\n // to support original button file format\n label.message = label.description\n delete label.description\n }\n jsonFile.labels.push(\n Object.assign(\n {\n lang: button.langWithRegion.replace(\"_\", \"-\"),\n },\n label\n )\n )\n }\n\n await dmgLicenseFromJSON(dmgPath, jsonFile, {\n onNonFatalError: log.warn.bind(log),\n })\n\n return jsonFile\n}\n"]}
|
||||
7
mc_test/node_modules/dmg-builder/out/dmgUtil.d.ts
generated
vendored
Executable file
7
mc_test/node_modules/dmg-builder/out/dmgUtil.d.ts
generated
vendored
Executable file
@ -0,0 +1,7 @@
|
||||
import { PlatformPackager } from "app-builder-lib";
|
||||
export { DmgTarget } from "./dmg";
|
||||
export declare function getDmgTemplatePath(): string;
|
||||
export declare function getDmgVendorPath(): string;
|
||||
export declare function attachAndExecute(dmgPath: string, readWrite: boolean, task: () => Promise<any>): Promise<any>;
|
||||
export declare function detach(name: string): Promise<void>;
|
||||
export declare function computeBackground(packager: PlatformPackager<any>): Promise<string>;
|
||||
66
mc_test/node_modules/dmg-builder/out/dmgUtil.js
generated
vendored
Executable file
66
mc_test/node_modules/dmg-builder/out/dmgUtil.js
generated
vendored
Executable file
@ -0,0 +1,66 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.serializeString = exports.computeBackground = exports.detach = exports.attachAndExecute = exports.getDmgVendorPath = exports.getDmgTemplatePath = exports.DmgTarget = void 0;
|
||||
const builder_util_1 = require("builder-util");
|
||||
const promise_1 = require("builder-util/out/promise");
|
||||
const path = require("path");
|
||||
var dmg_1 = require("./dmg");
|
||||
Object.defineProperty(exports, "DmgTarget", { enumerable: true, get: function () { return dmg_1.DmgTarget; } });
|
||||
const root = path.join(__dirname, "..");
|
||||
function getDmgTemplatePath() {
|
||||
return path.join(root, "templates");
|
||||
}
|
||||
exports.getDmgTemplatePath = getDmgTemplatePath;
|
||||
function getDmgVendorPath() {
|
||||
return path.join(root, "vendor");
|
||||
}
|
||||
exports.getDmgVendorPath = getDmgVendorPath;
|
||||
async function attachAndExecute(dmgPath, readWrite, task) {
|
||||
//noinspection SpellCheckingInspection
|
||||
const args = ["attach", "-noverify", "-noautoopen"];
|
||||
if (readWrite) {
|
||||
args.push("-readwrite");
|
||||
}
|
||||
args.push(dmgPath);
|
||||
const attachResult = await (0, builder_util_1.exec)("hdiutil", args);
|
||||
const deviceResult = attachResult == null ? null : /^(\/dev\/\w+)/.exec(attachResult);
|
||||
const device = deviceResult == null || deviceResult.length !== 2 ? null : deviceResult[1];
|
||||
if (device == null) {
|
||||
throw new Error(`Cannot mount: ${attachResult}`);
|
||||
}
|
||||
return await (0, promise_1.executeFinally)(task(), () => detach(device));
|
||||
}
|
||||
exports.attachAndExecute = attachAndExecute;
|
||||
async function detach(name) {
|
||||
try {
|
||||
await (0, builder_util_1.exec)("hdiutil", ["detach", "-quiet", name]);
|
||||
}
|
||||
catch (e) {
|
||||
await (0, builder_util_1.retry)(() => (0, builder_util_1.exec)("hdiutil", ["detach", "-force", "-debug", name]), 5, 1000, 500);
|
||||
}
|
||||
}
|
||||
exports.detach = detach;
|
||||
async function computeBackground(packager) {
|
||||
const resourceList = await packager.resourceList;
|
||||
if (resourceList.includes("background.tiff")) {
|
||||
return path.join(packager.buildResourcesDir, "background.tiff");
|
||||
}
|
||||
else if (resourceList.includes("background.png")) {
|
||||
return path.join(packager.buildResourcesDir, "background.png");
|
||||
}
|
||||
else {
|
||||
return path.join(getDmgTemplatePath(), "background.tiff");
|
||||
}
|
||||
}
|
||||
exports.computeBackground = computeBackground;
|
||||
/** @internal */
|
||||
function serializeString(data) {
|
||||
return (' $"' +
|
||||
data
|
||||
.match(/.{1,32}/g)
|
||||
.map(it => it.match(/.{1,4}/g).join(" "))
|
||||
.join('"\n $"') +
|
||||
'"');
|
||||
}
|
||||
exports.serializeString = serializeString;
|
||||
//# sourceMappingURL=dmgUtil.js.map
|
||||
1
mc_test/node_modules/dmg-builder/out/dmgUtil.js.map
generated
vendored
Executable file
1
mc_test/node_modules/dmg-builder/out/dmgUtil.js.map
generated
vendored
Executable file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"dmgUtil.js","sourceRoot":"","sources":["../src/dmgUtil.ts"],"names":[],"mappings":";;;AAAA,+CAA0C;AAE1C,sDAAyD;AACzD,6BAA4B;AAE5B,6BAAiC;AAAxB,gGAAA,SAAS,OAAA;AAElB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAA;AAEvC,SAAgB,kBAAkB;IAChC,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAA;AACrC,CAAC;AAFD,gDAEC;AAED,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AAClC,CAAC;AAFD,4CAEC;AAEM,KAAK,UAAU,gBAAgB,CAAC,OAAe,EAAE,SAAkB,EAAE,IAAwB;IAClG,sCAAsC;IACtC,MAAM,IAAI,GAAG,CAAC,QAAQ,EAAE,WAAW,EAAE,aAAa,CAAC,CAAA;IACnD,IAAI,SAAS,EAAE,CAAC;QACd,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;IACzB,CAAC;IAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;IAClB,MAAM,YAAY,GAAG,MAAM,IAAA,mBAAI,EAAC,SAAS,EAAE,IAAI,CAAC,CAAA;IAChD,MAAM,YAAY,GAAG,YAAY,IAAI,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,eAAe,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;IACrF,MAAM,MAAM,GAAG,YAAY,IAAI,IAAI,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAA;IACzF,IAAI,MAAM,IAAI,IAAI,EAAE,CAAC;QACnB,MAAM,IAAI,KAAK,CAAC,iBAAiB,YAAY,EAAE,CAAC,CAAA;IAClD,CAAC;IAED,OAAO,MAAM,IAAA,wBAAc,EAAC,IAAI,EAAE,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAA;AAC3D,CAAC;AAhBD,4CAgBC;AAEM,KAAK,UAAU,MAAM,CAAC,IAAY;IACvC,IAAI,CAAC;QACH,MAAM,IAAA,mBAAI,EAAC,SAAS,EAAE,CAAC,QAAQ,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC,CAAA;IACnD,CAAC;IAAC,OAAO,CAAM,EAAE,CAAC;QAChB,MAAM,IAAA,oBAAK,EAAC,GAAG,EAAE,CAAC,IAAA,mBAAI,EAAC,SAAS,EAAE,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,CAAC,CAAA;IACxF,CAAC;AACH,CAAC;AAND,wBAMC;AAEM,KAAK,UAAU,iBAAiB,CAAC,QAA+B;IACrE,MAAM,YAAY,GAAG,MAAM,QAAQ,CAAC,YAAY,CAAA;IAChD,IAAI,YAAY,CAAC,QAAQ,CAAC,iBAAiB,CAAC,EAAE,CAAC;QAC7C,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,iBAAiB,EAAE,iBAAiB,CAAC,CAAA;IACjE,CAAC;SAAM,IAAI,YAAY,CAAC,QAAQ,CAAC,gBAAgB,CAAC,EAAE,CAAC;QACnD,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,iBAAiB,EAAE,gBAAgB,CAAC,CAAA;IAChE,CAAC;SAAM,CAAC;QACN,OAAO,IAAI,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,iBAAiB,CAAC,CAAA;IAC3D,CAAC;AACH,CAAC;AATD,8CASC;AAED,gBAAgB;AAChB,SAAgB,eAAe,CAAC,IAAY;IAC1C,OAAO,CACL,MAAM;QACN,IAAI;aACD,KAAK,CAAC,UAAU,CAAE;aAClB,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACzC,IAAI,CAAC,SAAS,CAAC;QAClB,GAAG,CACJ,CAAA;AACH,CAAC;AATD,0CASC","sourcesContent":["import { exec, retry } from \"builder-util\"\nimport { PlatformPackager } from \"app-builder-lib\"\nimport { executeFinally } from \"builder-util/out/promise\"\nimport * as path from \"path\"\n\nexport { DmgTarget } from \"./dmg\"\n\nconst root = path.join(__dirname, \"..\")\n\nexport function getDmgTemplatePath() {\n return path.join(root, \"templates\")\n}\n\nexport function getDmgVendorPath() {\n return path.join(root, \"vendor\")\n}\n\nexport async function attachAndExecute(dmgPath: string, readWrite: boolean, task: () => Promise<any>) {\n //noinspection SpellCheckingInspection\n const args = [\"attach\", \"-noverify\", \"-noautoopen\"]\n if (readWrite) {\n args.push(\"-readwrite\")\n }\n\n args.push(dmgPath)\n const attachResult = await exec(\"hdiutil\", args)\n const deviceResult = attachResult == null ? null : /^(\\/dev\\/\\w+)/.exec(attachResult)\n const device = deviceResult == null || deviceResult.length !== 2 ? 
null : deviceResult[1]\n if (device == null) {\n throw new Error(`Cannot mount: ${attachResult}`)\n }\n\n return await executeFinally(task(), () => detach(device))\n}\n\nexport async function detach(name: string) {\n try {\n await exec(\"hdiutil\", [\"detach\", \"-quiet\", name])\n } catch (e: any) {\n await retry(() => exec(\"hdiutil\", [\"detach\", \"-force\", \"-debug\", name]), 5, 1000, 500)\n }\n}\n\nexport async function computeBackground(packager: PlatformPackager<any>): Promise<string> {\n const resourceList = await packager.resourceList\n if (resourceList.includes(\"background.tiff\")) {\n return path.join(packager.buildResourcesDir, \"background.tiff\")\n } else if (resourceList.includes(\"background.png\")) {\n return path.join(packager.buildResourcesDir, \"background.png\")\n } else {\n return path.join(getDmgTemplatePath(), \"background.tiff\")\n }\n}\n\n/** @internal */\nexport function serializeString(data: string) {\n return (\n ' $\"' +\n data\n .match(/.{1,32}/g)!\n .map(it => it.match(/.{1,4}/g)!.join(\" \"))\n .join('\"\\n $\"') +\n '\"'\n )\n}\n"]}
|
||||
9
mc_test/node_modules/dmg-builder/out/licenseButtons.d.ts
generated
vendored
Executable file
9
mc_test/node_modules/dmg-builder/out/licenseButtons.d.ts
generated
vendored
Executable file
@ -0,0 +1,9 @@
|
||||
import { PlatformPackager } from "app-builder-lib";
|
||||
export declare function getLicenseButtonsFile(packager: PlatformPackager<any>): Promise<Array<LicenseButtonsFile>>;
|
||||
export interface LicenseButtonsFile {
|
||||
file: string;
|
||||
lang: string;
|
||||
langWithRegion: string;
|
||||
langName: string;
|
||||
}
|
||||
export declare function getLicenseButtons(licenseButtonFiles: Array<LicenseButtonsFile>, langWithRegion: string, id: number, name: string): Promise<string>;
|
||||
144
mc_test/node_modules/dmg-builder/out/licenseButtons.js
generated
vendored
Executable file
144
mc_test/node_modules/dmg-builder/out/licenseButtons.js
generated
vendored
Executable file
@ -0,0 +1,144 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getLicenseButtons = exports.getLicenseButtonsFile = void 0;
|
||||
const builder_util_1 = require("builder-util");
|
||||
const license_1 = require("app-builder-lib/out/util/license");
|
||||
const fs_extra_1 = require("fs-extra");
|
||||
const iconv = require("iconv-lite");
|
||||
const js_yaml_1 = require("js-yaml");
|
||||
const dmgUtil_1 = require("./dmgUtil");
|
||||
const licenseDefaultButtons_1 = require("./licenseDefaultButtons");
|
||||
async function getLicenseButtonsFile(packager) {
|
||||
return (0, license_1.getLicenseAssets)((await packager.resourceList).filter(it => {
|
||||
const name = it.toLowerCase();
|
||||
// noinspection SpellCheckingInspection
|
||||
return name.startsWith("licensebuttons_") && (name.endsWith(".json") || name.endsWith(".yml"));
|
||||
}), packager);
|
||||
}
|
||||
exports.getLicenseButtonsFile = getLicenseButtonsFile;
|
||||
async function getLicenseButtons(licenseButtonFiles, langWithRegion, id, name) {
|
||||
let data = (0, licenseDefaultButtons_1.getDefaultButtons)(langWithRegion, id, name);
|
||||
for (const item of licenseButtonFiles) {
|
||||
if (item.langWithRegion !== langWithRegion) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const fileData = (0, js_yaml_1.load)(await (0, fs_extra_1.readFile)(item.file, "utf-8"));
|
||||
const buttonsStr = labelToHex(fileData.lang, item.lang, item.langWithRegion) +
|
||||
labelToHex(fileData.agree, item.lang, item.langWithRegion) +
|
||||
labelToHex(fileData.disagree, item.lang, item.langWithRegion) +
|
||||
labelToHex(fileData.print, item.lang, item.langWithRegion) +
|
||||
labelToHex(fileData.save, item.lang, item.langWithRegion) +
|
||||
labelToHex(fileData.description, item.lang, item.langWithRegion);
|
||||
data = `data 'STR#' (${id}, "${name}") {\n`;
|
||||
data += (0, dmgUtil_1.serializeString)("0006" + buttonsStr);
|
||||
data += `\n};`;
|
||||
if (builder_util_1.log.isDebugEnabled) {
|
||||
builder_util_1.log.debug({ lang: item.langName, data }, `overwriting license buttons`);
|
||||
}
|
||||
return data;
|
||||
}
|
||||
catch (e) {
|
||||
builder_util_1.log.debug({ error: e }, "cannot overwrite license buttons");
|
||||
return data;
|
||||
}
|
||||
}
|
||||
return data;
|
||||
}
|
||||
exports.getLicenseButtons = getLicenseButtons;
|
||||
function labelToHex(label, lang, langWithRegion) {
|
||||
const lbl = hexEncode(label, lang, langWithRegion).toString().toUpperCase();
|
||||
const len = numberToHex(lbl.length / 2);
|
||||
return len + lbl;
|
||||
}
|
||||
function numberToHex(nb) {
|
||||
return ("0" + nb.toString(16)).slice(-2);
|
||||
}
|
||||
function hexEncode(str, lang, langWithRegion) {
|
||||
const macCodePages = getMacCodePage(lang, langWithRegion);
|
||||
let result = "";
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
try {
|
||||
let hex = getMacHexCode(str, i, macCodePages);
|
||||
if (hex === undefined) {
|
||||
hex = "3F"; //?
|
||||
}
|
||||
result += hex;
|
||||
}
|
||||
catch (e) {
|
||||
builder_util_1.log.debug({ error: e, char: str[i] }, "cannot convert");
|
||||
result += "3F"; //?
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function getMacCodePage(lang, langWithRegion) {
|
||||
switch (lang) {
|
||||
case "ja": //japanese
|
||||
return ["euc-jp"]; //Apple Japanese
|
||||
case "zh": //chinese
|
||||
if (langWithRegion === "zh_CN") {
|
||||
return ["gb2312"]; //Apple Simplified Chinese (GB 2312)
|
||||
}
|
||||
return ["big5"]; //Apple Traditional Chinese (Big5)
|
||||
case "ko": //korean
|
||||
return ["euc-kr"]; //Apple Korean
|
||||
case "ar": //arabic
|
||||
case "ur": //urdu
|
||||
return ["macarabic"]; //Apple Arabic
|
||||
case "he": //hebrew
|
||||
return ["machebrew"]; //Apple Hebrew
|
||||
case "el": //greek
|
||||
case "elc": //greek
|
||||
return ["macgreek"]; //Apple Greek
|
||||
case "ru": //russian
|
||||
case "be": //belarussian
|
||||
case "sr": //serbian
|
||||
case "bg": //bulgarian
|
||||
case "uz": //uzbek
|
||||
return ["maccyrillic"]; //Apple Macintosh Cyrillic
|
||||
case "ro": //romanian
|
||||
return ["macromania"]; //Apple Romanian
|
||||
case "uk": //ukrainian
|
||||
return ["macukraine"]; //Apple Ukrainian
|
||||
case "th": //thai
|
||||
return ["macthai"]; //Apple Thai
|
||||
case "et": //estonian
|
||||
case "lt": //lithuanian
|
||||
case "lv": //latvian
|
||||
case "pl": //polish
|
||||
case "hu": //hungarian
|
||||
case "cs": //czech
|
||||
case "sk": //slovak
|
||||
return ["maccenteuro"]; //Apple Macintosh Central Europe
|
||||
case "is": //icelandic
|
||||
case "fo": //faroese
|
||||
return ["maciceland"]; //Apple Icelandic
|
||||
case "tr": //turkish
|
||||
return ["macturkish"]; //Apple Turkish
|
||||
case "hr": //croatian
|
||||
case "sl": //slovenian
|
||||
return ["maccroatian"]; //Apple Croatian
|
||||
default:
|
||||
return ["macroman"]; //Apple Macintosh Roman
|
||||
}
|
||||
}
|
||||
function getMacHexCode(str, i, macCodePages) {
|
||||
const code = str.charCodeAt(i);
|
||||
if (code < 128) {
|
||||
return code.toString(16);
|
||||
}
|
||||
else if (code < 256) {
|
||||
return iconv.encode(str[i], "macroman").toString("hex");
|
||||
}
|
||||
else {
|
||||
for (let i = 0; i < macCodePages.length; i++) {
|
||||
const result = iconv.encode(str[i], macCodePages[i]).toString("hex");
|
||||
if (result !== undefined) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
return code;
|
||||
}
|
||||
//# sourceMappingURL=licenseButtons.js.map
|
||||
1
mc_test/node_modules/dmg-builder/out/licenseButtons.js.map
generated
vendored
Executable file
1
mc_test/node_modules/dmg-builder/out/licenseButtons.js.map
generated
vendored
Executable file
File diff suppressed because one or more lines are too long
1
mc_test/node_modules/dmg-builder/out/licenseDefaultButtons.d.ts
generated
vendored
Executable file
1
mc_test/node_modules/dmg-builder/out/licenseDefaultButtons.d.ts
generated
vendored
Executable file
@ -0,0 +1 @@
|
||||
export declare function getDefaultButtons(langWithRegion: string, id: number, name: string): string;
|
||||
261
mc_test/node_modules/dmg-builder/out/licenseDefaultButtons.js
generated
vendored
Executable file
261
mc_test/node_modules/dmg-builder/out/licenseDefaultButtons.js
generated
vendored
Executable file
@ -0,0 +1,261 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getDefaultButtons = void 0;
|
||||
function getDefaultButtons(langWithRegion, id, name) {
|
||||
switch (langWithRegion) {
|
||||
case "de_DE":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0744 6575 7473 6368 0B41 6B7A 6570"
|
||||
$"7469 6572 656E 0841 626C 6568 6E65 6E07"
|
||||
$"4472 7563 6B65 6E0A 5369 6368 6572 6E2E"
|
||||
$"2E2E E74B 6C69 636B 656E 2053 6965 2069"
|
||||
$"6E20 D241 6B7A 6570 7469 6572 656E D32C"
|
||||
$"2077 656E 6E20 5369 6520 6D69 7420 6465"
|
||||
$"6E20 4265 7374 696D 6D75 6E67 656E 2064"
|
||||
$"6573 2053 6F66 7477 6172 652D 4C69 7A65"
|
||||
$"6E7A 7665 7274 7261 6773 2065 696E 7665"
|
||||
$"7273 7461 6E64 656E 2073 696E 642E 2046"
|
||||
$"616C 6C73 206E 6963 6874 2C20 6269 7474"
|
||||
$"6520 D241 626C 6568 6E65 6ED3 2061 6E6B"
|
||||
$"6C69 636B 656E 2E20 5369 6520 6B9A 6E6E"
|
||||
$"656E 2064 6965 2053 6F66 7477 6172 6520"
|
||||
$"6E75 7220 696E 7374 616C 6C69 6572 656E"
|
||||
$"2C20 7765 6E6E 2053 6965 20D2 416B 7A65"
|
||||
$"7074 6965 7265 6ED3 2061 6E67 656B 6C69"
|
||||
$"636B 7420 6861 6265 6E2E"
|
||||
};`;
|
||||
case "fr_FR":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0846 7261 6E8D 6169 7308 4163 6365"
|
||||
$"7074 6572 0752 6566 7573 6572 0849 6D70"
|
||||
$"7269 6D65 720E 456E 7265 6769 7374 7265"
|
||||
$"722E 2E2E BA53 6920 766F 7573 2061 6363"
|
||||
$"6570 7465 7A20 6C65 7320 7465 726D 6573"
|
||||
$"2064 6520 6C61 2070 728E 7365 6E74 6520"
|
||||
$"6C69 6365 6E63 652C 2063 6C69 7175 657A"
|
||||
$"2073 7572 2022 4163 6365 7074 6572 2220"
|
||||
$"6166 696E 2064 2769 6E73 7461 6C6C 6572"
|
||||
$"206C 6520 6C6F 6769 6369 656C 2E20 5369"
|
||||
$"2076 6F75 7320 6E27 9074 6573 2070 6173"
|
||||
$"2064 2761 6363 6F72 6420 6176 6563 206C"
|
||||
$"6573 2074 6572 6D65 7320 6465 206C 6120"
|
||||
$"6C69 6365 6E63 652C 2063 6C69 7175 657A"
|
||||
$"2073 7572 2022 5265 6675 7365 7222 2E"
|
||||
};`;
|
||||
case "fr_CA":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 1146 7261 6E8D 6169 7320 6361 6E61"
|
||||
$"6469 656E 0841 6363 6570 7465 7207 5265"
|
||||
$"6675 7365 7208 496D 7072 696D 6572 0E45"
|
||||
$"6E72 6567 6973 7472 6572 2E2E 2EBA 5369"
|
||||
$"2076 6F75 7320 6163 6365 7074 657A 206C"
|
||||
$"6573 2074 6572 6D65 7320 6465 206C 6120"
|
||||
$"7072 8E73 656E 7465 206C 6963 656E 6365"
|
||||
$"2C20 636C 6971 7565 7A20 7375 7220 2241"
|
||||
$"6363 6570 7465 7222 2061 6669 6E20 6427"
|
||||
$"696E 7374 616C 6C65 7220 6C65 206C 6F67"
|
||||
$"6963 6965 6C2E 2053 6920 766F 7573 206E"
|
||||
$"2790 7465 7320 7061 7320 6427 6163 636F"
|
||||
$"7264 2061 7665 6320 6C65 7320 7465 726D"
|
||||
$"6573 2064 6520 6C61 206C 6963 656E 6365"
|
||||
$"2C20 636C 6971 7565 7A20 7375 7220 2252"
|
||||
$"6566 7573 6572 222E"
|
||||
};`;
|
||||
case "es_ES":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0745 7370 6196 6F6C 0741 6365 7074"
|
||||
$"6172 0A4E 6F20 6163 6570 7461 7208 496D"
|
||||
$"7072 696D 6972 0A47 7561 7264 6172 2E2E"
|
||||
$"2EC0 5369 2065 7374 8720 6465 2061 6375"
|
||||
$"6572 646F 2063 6F6E 206C 6F73 2074 8E72"
|
||||
$"6D69 6E6F 7320 6465 2065 7374 6120 6C69"
|
||||
$"6365 6E63 6961 2C20 7075 6C73 6520 2241"
|
||||
$"6365 7074 6172 2220 7061 7261 2069 6E73"
|
||||
$"7461 6C61 7220 656C 2073 6F66 7477 6172"
|
||||
$"652E 2045 6E20 656C 2073 7570 7565 7374"
|
||||
$"6F20 6465 2071 7565 206E 6F20 6573 748E"
|
||||
$"2064 6520 6163 7565 7264 6F20 636F 6E20"
|
||||
$"6C6F 7320 748E 726D 696E 6F73 2064 6520"
|
||||
$"6573 7461 206C 6963 656E 6369 612C 2070"
|
||||
$"756C 7365 2022 4E6F 2061 6365 7074 6172"
|
||||
$"2E22"
|
||||
};`;
|
||||
case "it_IT":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0849 7461 6C69 616E 6F07 4163 6365"
|
||||
$"7474 6F07 5269 6669 7574 6F06 5374 616D"
|
||||
$"7061 0B52 6567 6973 7472 612E 2E2E 7F53"
|
||||
$"6520 6163 6365 7474 6920 6C65 2063 6F6E"
|
||||
$"6469 7A69 6F6E 6920 6469 2071 7565 7374"
|
||||
$"6120 6C69 6365 6E7A 612C 2066 6169 2063"
|
||||
$"6C69 6320 7375 2022 4163 6365 7474 6F22"
|
||||
$"2070 6572 2069 6E73 7461 6C6C 6172 6520"
|
||||
$"696C 2073 6F66 7477 6172 652E 2041 6C74"
|
||||
$"7269 6D65 6E74 6920 6661 6920 636C 6963"
|
||||
$"2073 7520 2252 6966 6975 746F 222E"
|
||||
};`;
|
||||
case "ja_JP":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 084A 6170 616E 6573 650A 93AF 88D3"
|
||||
$"82B5 82DC 82B7 0C93 AF88 D382 B582 DC82"
|
||||
$"B982 F108 88F3 8DFC 82B7 82E9 0795 DB91"
|
||||
$"B62E 2E2E B496 7B83 5C83 7483 6783 4583"
|
||||
$"4783 418E 6797 708B 9691 F88C 5F96 F182"
|
||||
$"CC8F F08C 8F82 C993 AF88 D382 B382 EA82"
|
||||
$"E98F EA8D 8782 C982 CD81 4183 5C83 7483"
|
||||
$"6783 4583 4783 4182 F083 4383 9383 5883"
|
||||
$"6781 5B83 8B82 B782 E982 BD82 DF82 C981"
|
||||
$"7593 AF88 D382 B582 DC82 B781 7682 F089"
|
||||
$"9F82 B582 C482 AD82 BE82 B382 A281 4281"
|
||||
$"4093 AF88 D382 B382 EA82 C882 A28F EA8D"
|
||||
$"8782 C982 CD81 4181 7593 AF88 D382 B582"
|
||||
$"DC82 B982 F181 7682 F089 9F82 B582 C482"
|
||||
$"AD82 BE82 B382 A281 42"
|
||||
};`;
|
||||
case "nl_NL":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0A4E 6564 6572 6C61 6E64 7302 4A61"
|
||||
$"034E 6565 0550 7269 6E74 0942 6577 6161"
|
||||
$"722E 2E2E A449 6E64 6965 6E20 7520 616B"
|
||||
$"6B6F 6F72 6420 6761 6174 206D 6574 2064"
|
||||
$"6520 766F 6F72 7761 6172 6465 6E20 7661"
|
||||
$"6E20 6465 7A65 206C 6963 656E 7469 652C"
|
||||
$"206B 756E 7420 7520 6F70 2027 4A61 2720"
|
||||
$"6B6C 696B 6B65 6E20 6F6D 2064 6520 7072"
|
||||
$"6F67 7261 6D6D 6174 7575 7220 7465 2069"
|
||||
$"6E73 7461 6C6C 6572 656E 2E20 496E 6469"
|
||||
$"656E 2075 206E 6965 7420 616B 6B6F 6F72"
|
||||
$"6420 6761 6174 2C20 6B6C 696B 7420 7520"
|
||||
$"6F70 2027 4E65 6527 2E"
|
||||
};`;
|
||||
case "sv_SE":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0653 7665 6E73 6B08 476F 646B 8A6E"
|
||||
$"6E73 0641 7662 9A6A 7308 536B 7269 7620"
|
||||
$"7574 0853 7061 7261 2E2E 2E93 4F6D 2044"
|
||||
$"7520 676F 646B 8A6E 6E65 7220 6C69 6365"
|
||||
$"6E73 7669 6C6C 6B6F 7265 6E20 6B6C 6963"
|
||||
$"6B61 2070 8C20 2247 6F64 6B8A 6E6E 7322"
|
||||
$"2066 9A72 2061 7474 2069 6E73 7461 6C6C"
|
||||
$"6572 6120 7072 6F67 7261 6D70 726F 6475"
|
||||
$"6B74 656E 2E20 4F6D 2044 7520 696E 7465"
|
||||
$"2067 6F64 6B8A 6E6E 6572 206C 6963 656E"
|
||||
$"7376 696C 6C6B 6F72 656E 2C20 6B6C 6963"
|
||||
$"6B61 2070 8C20 2241 7662 9A6A 7322 2E"
|
||||
};`;
|
||||
case "br_FR":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 1150 6F72 7475 6775 9073 2C20 4272"
|
||||
$"6173 696C 0943 6F6E 636F 7264 6172 0944"
|
||||
$"6973 636F 7264 6172 0849 6D70 7269 6D69"
|
||||
$"7209 5361 6C76 6172 2E2E 2E8C 5365 2065"
|
||||
$"7374 8720 6465 2061 636F 7264 6F20 636F"
|
||||
$"6D20 6F73 2074 6572 6D6F 7320 6465 7374"
|
||||
$"6120 6C69 6365 6E8D 612C 2070 7265 7373"
|
||||
$"696F 6E65 2022 436F 6E63 6F72 6461 7222"
|
||||
$"2070 6172 6120 696E 7374 616C 6172 206F"
|
||||
$"2073 6F66 7477 6172 652E 2053 6520 6E8B"
|
||||
$"6F20 6573 7487 2064 6520 6163 6F72 646F"
|
||||
$"2C20 7072 6573 7369 6F6E 6520 2244 6973"
|
||||
$"636F 7264 6172 222E"
|
||||
};`;
|
||||
case "zh_TW":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 1354 7261 6469 7469 6F6E 616C 2043"
|
||||
$"6869 6E65 7365 04A6 50B7 4E06 A4A3 A650"
|
||||
$"B74E 04A6 43A6 4C06 C078 A673 A14B 50A6"
|
||||
$"70AA 47B1 7AA6 50B7 4EA5 BBB3 5CA5 69C3"
|
||||
$"D2B8 CCAA BAB1 F8B4 DAA1 41BD D0AB F6A1"
|
||||
$"A7A6 50B7 4EA1 A8A5 48A6 77B8 CBB3 6EC5"
|
||||
$"E9A1 43A6 70AA 47A4 A3A6 50B7 4EA1 41BD"
|
||||
$"D0AB F6A1 A7A4 A3A6 50B7 4EA1 A8A1 43"
|
||||
};`;
|
||||
case "zh_CN":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 1253 696D 706C 6966 6965 6420 4368"
|
||||
$"696E 6573 6504 CDAC D2E2 06B2 BBCD ACD2"
|
||||
$"E204 B4F2 D3A1 06B4 E6B4 A2A1 AD54 C8E7"
|
||||
$"B9FB C4FA CDAC D2E2 B1BE D0ED BFC9 D0AD"
|
||||
$"D2E9 B5C4 CCF5 BFEE A3AC C7EB B0B4 A1B0"
|
||||
$"CDAC D2E2 A1B1 C0B4 B0B2 D7B0 B4CB C8ED"
|
||||
$"BCFE A1A3 C8E7 B9FB C4FA B2BB CDAC D2E2"
|
||||
$"A3AC C7EB B0B4 A1B0 B2BB CDAC D2E2 A1B1"
|
||||
$"A1A3"
|
||||
};`;
|
||||
case "da_DK":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0544 616E 736B 0445 6E69 6705 5565"
|
||||
$"6E69 6707 5564 736B 7269 760A 4172 6B69"
|
||||
$"7665 722E 2E2E 9848 7669 7320 6475 2061"
|
||||
$"6363 6570 7465 7265 7220 6265 7469 6E67"
|
||||
$"656C 7365 726E 6520 6920 6C69 6365 6E73"
|
||||
$"6166 7461 6C65 6E2C 2073 6B61 6C20 6475"
|
||||
$"206B 6C69 6B6B 6520 708C 20D2 456E 6967"
|
||||
$"D320 666F 7220 6174 2069 6E73 7461 6C6C"
|
||||
$"6572 6520 736F 6674 7761 7265 6E2E 204B"
|
||||
$"6C69 6B20 708C 20D2 5565 6E69 67D3 2066"
|
||||
$"6F72 2061 7420 616E 6E75 6C6C 6572 6520"
|
||||
$"696E 7374 616C 6C65 7269 6E67 656E 2E"
|
||||
};`;
|
||||
case "fi_FI":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0553 756F 6D69 0848 7976 8A6B 7379"
|
||||
$"6E0A 456E 2068 7976 8A6B 7379 0754 756C"
|
||||
$"6F73 7461 0954 616C 6C65 6E6E 61C9 6F48"
|
||||
$"7976 8A6B 7379 206C 6973 656E 7373 6973"
|
||||
$"6F70 696D 756B 7365 6E20 6568 646F 7420"
|
||||
$"6F73 6F69 7474 616D 616C 6C61 20D5 4879"
|
||||
$"768A 6B73 79D5 2E20 4A6F 7320 6574 2068"
|
||||
$"7976 8A6B 7379 2073 6F70 696D 756B 7365"
|
||||
$"6E20 6568 746F 6A61 2C20 6F73 6F69 7461"
|
||||
$"20D5 456E 2068 7976 8A6B 7379 D52E"
|
||||
};`;
|
||||
case "ko_KR":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 064B 6F72 6561 6E04 B5BF C0C7 09B5"
|
||||
$"BFC0 C720 BEC8 C7D4 06C7 C1B8 B0C6 AE07"
|
||||
$"C0FA C0E5 2E2E 2E7E BBE7 BFEB 20B0 E8BE"
|
||||
$"E0BC ADC0 C720 B3BB BFEB BFA1 20B5 BFC0"
|
||||
$"C7C7 CFB8 E92C 2022 B5BF C0C7 2220 B4DC"
|
||||
$"C3DF B8A6 20B4 ADB7 AF20 BCD2 C7C1 C6AE"
|
||||
$"BFFE BEEE B8A6 20BC B3C4 A1C7 CFBD CABD"
|
||||
$"C3BF C02E 20B5 BFC0 C7C7 CFC1 F620 BECA"
|
||||
$"B4C2 B4D9 B8E9 2C20 22B5 BFC0 C720 BEC8"
|
||||
$"C7D4 2220 B4DC C3DF B8A6 20B4 A9B8 A3BD"
|
||||
$"CABD C3BF C02E"
|
||||
};`;
|
||||
case "nb_NO":
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 054E 6F72 736B 0445 6E69 6709 496B"
|
||||
$"6B65 2065 6E69 6708 536B 7269 7620 7574"
|
||||
$"0A41 726B 6976 6572 2E2E 2EA3 4876 6973"
|
||||
$"2044 6520 6572 2065 6E69 6720 6920 6265"
|
||||
$"7374 656D 6D65 6C73 656E 6520 6920 6465"
|
||||
$"6E6E 6520 6C69 7365 6E73 6176 7461 6C65"
|
||||
$"6E2C 206B 6C69 6B6B 6572 2044 6520 708C"
|
||||
$"2022 456E 6967 222D 6B6E 6170 7065 6E20"
|
||||
$"666F 7220 8C20 696E 7374 616C 6C65 7265"
|
||||
$"2070 726F 6772 616D 7661 7265 6E2E 2048"
|
||||
$"7669 7320 4465 2069 6B6B 6520 6572 2065"
|
||||
$"6E69 672C 206B 6C69 6B6B 6572 2044 6520"
|
||||
$"708C 2022 496B 6B65 2065 6E69 6722 2E"
|
||||
};`;
|
||||
default:
|
||||
// en_US
|
||||
return `data 'STR#' (${id}, "${name}") {
|
||||
$"0006 0745 6E67 6C69 7368 0541 6772 6565"
|
||||
$"0844 6973 6167 7265 6505 5072 696E 7407"
|
||||
$"5361 7665 2E2E 2E7A 4966 2079 6F75 2061"
|
||||
$"6772 6565 2077 6974 6820 7468 6520 7465"
|
||||
$"726D 7320 6F66 2074 6869 7320 6C69 6365"
|
||||
$"6E73 652C 2070 7265 7373 20D2 4167 7265"
|
||||
$"65D3 2074 6F20 696E 7374 616C 6C20 7468"
|
||||
$"6520 736F 6674 7761 7265 2E20 4966 2079"
|
||||
$"6F75 2064 6F20 6E6F 7420 6167 7265 652C"
|
||||
$"2070 7265 7373 20D2 4469 7361 6772 6565"
|
||||
$"D32E"
|
||||
};`;
|
||||
}
|
||||
}
|
||||
exports.getDefaultButtons = getDefaultButtons;
|
||||
//# sourceMappingURL=licenseDefaultButtons.js.map
|
||||
1
mc_test/node_modules/dmg-builder/out/licenseDefaultButtons.js.map
generated
vendored
Executable file
1
mc_test/node_modules/dmg-builder/out/licenseDefaultButtons.js.map
generated
vendored
Executable file
File diff suppressed because one or more lines are too long
36
mc_test/node_modules/dmg-builder/package.json
generated
vendored
Executable file
36
mc_test/node_modules/dmg-builder/package.json
generated
vendored
Executable file
@ -0,0 +1,36 @@
|
||||
{
|
||||
"name": "dmg-builder",
|
||||
"version": "24.13.3",
|
||||
"main": "out/dmgUtil.js",
|
||||
"author": "Vladimir Krivosheev",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/electron-userland/electron-builder.git",
|
||||
"directory": "packages/dmg-builder"
|
||||
},
|
||||
"bugs": "https://github.com/electron-userland/electron-builder/issues",
|
||||
"homepage": "https://github.com/electron-userland/electron-builder",
|
||||
"files": [
|
||||
"out",
|
||||
"templates",
|
||||
"vendor"
|
||||
],
|
||||
"dependencies": {
|
||||
"fs-extra": "^10.1.0",
|
||||
"iconv-lite": "^0.6.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
"app-builder-lib": "24.13.3",
|
||||
"builder-util": "24.13.1",
|
||||
"builder-util-runtime": "9.2.4"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"dmg-license": "^1.0.11"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/fs-extra": "9.0.13",
|
||||
"@types/js-yaml": "4.0.3",
|
||||
"temp-file": "3.4.0"
|
||||
},
|
||||
"typings": "./out/dmg.d.ts"
|
||||
}
|
||||
3
mc_test/node_modules/dmg-builder/readme.md
generated
vendored
Executable file
3
mc_test/node_modules/dmg-builder/readme.md
generated
vendored
Executable file
@ -0,0 +1,3 @@
|
||||
# dmg-builder
|
||||
|
||||
Utilities to build DMG. Used by [electron-builder](https://github.com/electron-userland/electron-builder).
|
||||
BIN
mc_test/node_modules/dmg-builder/templates/background.tiff
generated
vendored
Executable file
BIN
mc_test/node_modules/dmg-builder/templates/background.tiff
generated
vendored
Executable file
Binary file not shown.
977
mc_test/node_modules/dmg-builder/vendor/biplist/__init__.py
generated
vendored
Executable file
977
mc_test/node_modules/dmg-builder/vendor/biplist/__init__.py
generated
vendored
Executable file
@ -0,0 +1,977 @@
|
||||
"""biplist -- a library for reading and writing binary property list files.
|
||||
|
||||
Binary Property List (plist) files provide a faster and smaller serialization
|
||||
format for property lists on OS X. This is a library for generating binary
|
||||
plists which can be read by OS X, iOS, or other clients.
|
||||
|
||||
The API models the plistlib API, and will call through to plistlib when
|
||||
XML serialization or deserialization is required.
|
||||
|
||||
To generate plists with UID values, wrap the values with the Uid object. The
|
||||
value must be an int.
|
||||
|
||||
To generate plists with NSData/CFData values, wrap the values with the
|
||||
Data object. The value must be a string.
|
||||
|
||||
Date values can only be datetime.datetime objects.
|
||||
|
||||
The exceptions InvalidPlistException and NotBinaryPlistException may be
|
||||
thrown to indicate that the data cannot be serialized or deserialized as
|
||||
a binary plist.
|
||||
|
||||
Plist generation example:
|
||||
|
||||
from biplist import *
|
||||
from datetime import datetime
|
||||
plist = {'aKey':'aValue',
|
||||
'0':1.322,
|
||||
'now':datetime.now(),
|
||||
'list':[1,2,3],
|
||||
'tuple':('a','b','c')
|
||||
}
|
||||
try:
|
||||
writePlist(plist, "example.plist")
|
||||
except (InvalidPlistException, NotBinaryPlistException), e:
|
||||
print "Something bad happened:", e
|
||||
|
||||
Plist parsing example:
|
||||
|
||||
from biplist import *
|
||||
try:
|
||||
plist = readPlist("example.plist")
|
||||
print plist
|
||||
except (InvalidPlistException, NotBinaryPlistException), e:
|
||||
print "Not a plist:", e
|
||||
"""
|
||||
|
||||
from collections import namedtuple
|
||||
import datetime
|
||||
import io
|
||||
import math
|
||||
import plistlib
|
||||
from struct import pack, unpack, unpack_from
|
||||
from struct import error as struct_error
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
unicode
|
||||
unicodeEmpty = r''
|
||||
except NameError:
|
||||
unicode = str
|
||||
unicodeEmpty = ''
|
||||
try:
|
||||
long
|
||||
except NameError:
|
||||
long = int
|
||||
try:
|
||||
{}.iteritems
|
||||
iteritems = lambda x: x.iteritems()
|
||||
except AttributeError:
|
||||
iteritems = lambda x: x.items()
|
||||
|
||||
__all__ = [
|
||||
'Uid', 'Data', 'readPlist', 'writePlist', 'readPlistFromString',
|
||||
'writePlistToString', 'InvalidPlistException', 'NotBinaryPlistException'
|
||||
]
|
||||
|
||||
# Apple uses Jan 1, 2001 as a base for all plist date/times.
|
||||
apple_reference_date = datetime.datetime.utcfromtimestamp(978307200)
|
||||
|
||||
class Uid(object):
|
||||
"""Wrapper around integers for representing UID values. This
|
||||
is used in keyed archiving."""
|
||||
integer = 0
|
||||
def __init__(self, integer):
|
||||
self.integer = integer
|
||||
|
||||
def __repr__(self):
|
||||
return "Uid(%d)" % self.integer
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(self, Uid) and isinstance(other, Uid):
|
||||
return self.integer == other.integer
|
||||
return False
|
||||
|
||||
def __cmp__(self, other):
|
||||
return self.integer - other.integer
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.integer < other.integer
|
||||
|
||||
def __hash__(self):
|
||||
return self.integer
|
||||
|
||||
def __int__(self):
|
||||
return int(self.integer)
|
||||
|
||||
class Data(bytes):
|
||||
"""Wrapper around bytes to distinguish Data values."""
|
||||
|
||||
class InvalidPlistException(Exception):
|
||||
"""Raised when the plist is incorrectly formatted."""
|
||||
|
||||
class NotBinaryPlistException(Exception):
|
||||
"""Raised when a binary plist was expected but not encountered."""
|
||||
|
||||
def readPlist(pathOrFile):
|
||||
"""Raises NotBinaryPlistException, InvalidPlistException"""
|
||||
didOpen = False
|
||||
result = None
|
||||
if isinstance(pathOrFile, (bytes, unicode)):
|
||||
pathOrFile = open(pathOrFile, 'rb')
|
||||
didOpen = True
|
||||
try:
|
||||
reader = PlistReader(pathOrFile)
|
||||
result = reader.parse()
|
||||
except NotBinaryPlistException as e:
|
||||
try:
|
||||
pathOrFile.seek(0)
|
||||
result = None
|
||||
if hasattr(plistlib, 'loads'):
|
||||
contents = None
|
||||
if isinstance(pathOrFile, (bytes, unicode)):
|
||||
with open(pathOrFile, 'rb') as f:
|
||||
contents = f.read()
|
||||
else:
|
||||
contents = pathOrFile.read()
|
||||
result = plistlib.loads(contents)
|
||||
else:
|
||||
result = plistlib.readPlist(pathOrFile)
|
||||
result = wrapDataObject(result, for_binary=True)
|
||||
except Exception as e:
|
||||
raise InvalidPlistException(e)
|
||||
finally:
|
||||
if didOpen:
|
||||
pathOrFile.close()
|
||||
return result
|
||||
|
||||
def wrapDataObject(o, for_binary=False):
|
||||
if isinstance(o, Data) and not for_binary:
|
||||
v = sys.version_info
|
||||
if not (v[0] >= 3 and v[1] >= 4):
|
||||
o = plistlib.Data(o)
|
||||
elif isinstance(o, (bytes, plistlib.Data)) and for_binary:
|
||||
if hasattr(o, 'data'):
|
||||
o = Data(o.data)
|
||||
elif isinstance(o, tuple):
|
||||
o = wrapDataObject(list(o), for_binary)
|
||||
o = tuple(o)
|
||||
elif isinstance(o, list):
|
||||
for i in range(len(o)):
|
||||
o[i] = wrapDataObject(o[i], for_binary)
|
||||
elif isinstance(o, dict):
|
||||
for k in o:
|
||||
o[k] = wrapDataObject(o[k], for_binary)
|
||||
return o
|
||||
|
||||
def writePlist(rootObject, pathOrFile, binary=True):
|
||||
if not binary:
|
||||
rootObject = wrapDataObject(rootObject, binary)
|
||||
if hasattr(plistlib, "dump"):
|
||||
if isinstance(pathOrFile, (bytes, unicode)):
|
||||
with open(pathOrFile, 'wb') as f:
|
||||
return plistlib.dump(rootObject, f)
|
||||
else:
|
||||
return plistlib.dump(rootObject, pathOrFile)
|
||||
else:
|
||||
return plistlib.writePlist(rootObject, pathOrFile)
|
||||
else:
|
||||
didOpen = False
|
||||
if isinstance(pathOrFile, (bytes, unicode)):
|
||||
pathOrFile = open(pathOrFile, 'wb')
|
||||
didOpen = True
|
||||
writer = PlistWriter(pathOrFile)
|
||||
result = writer.writeRoot(rootObject)
|
||||
if didOpen:
|
||||
pathOrFile.close()
|
||||
return result
|
||||
|
||||
def readPlistFromString(data):
|
||||
return readPlist(io.BytesIO(data))
|
||||
|
||||
def writePlistToString(rootObject, binary=True):
|
||||
if not binary:
|
||||
rootObject = wrapDataObject(rootObject, binary)
|
||||
if hasattr(plistlib, "dumps"):
|
||||
return plistlib.dumps(rootObject)
|
||||
elif hasattr(plistlib, "writePlistToBytes"):
|
||||
return plistlib.writePlistToBytes(rootObject)
|
||||
else:
|
||||
return plistlib.writePlistToString(rootObject)
|
||||
else:
|
||||
ioObject = io.BytesIO()
|
||||
writer = PlistWriter(ioObject)
|
||||
writer.writeRoot(rootObject)
|
||||
return ioObject.getvalue()
|
||||
|
||||
def is_stream_binary_plist(stream):
|
||||
stream.seek(0)
|
||||
header = stream.read(7)
|
||||
if header == b'bplist0':
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
PlistTrailer = namedtuple('PlistTrailer', 'offsetSize, objectRefSize, offsetCount, topLevelObjectNumber, offsetTableOffset')
|
||||
PlistByteCounts = namedtuple('PlistByteCounts', 'nullBytes, boolBytes, intBytes, realBytes, dateBytes, dataBytes, stringBytes, uidBytes, arrayBytes, setBytes, dictBytes')
|
||||
|
||||
class PlistReader(object):
|
||||
file = None
|
||||
contents = ''
|
||||
offsets = None
|
||||
trailer = None
|
||||
currentOffset = 0
|
||||
# Used to detect recursive object references.
|
||||
offsetsStack = []
|
||||
|
||||
def __init__(self, fileOrStream):
|
||||
"""Raises NotBinaryPlistException."""
|
||||
self.reset()
|
||||
self.file = fileOrStream
|
||||
|
||||
def parse(self):
|
||||
return self.readRoot()
|
||||
|
||||
def reset(self):
|
||||
self.trailer = None
|
||||
self.contents = ''
|
||||
self.offsets = []
|
||||
self.currentOffset = 0
|
||||
self.offsetsStack = []
|
||||
|
||||
def readRoot(self):
|
||||
result = None
|
||||
self.reset()
|
||||
# Get the header, make sure it's a valid file.
|
||||
if not is_stream_binary_plist(self.file):
|
||||
raise NotBinaryPlistException()
|
||||
self.file.seek(0)
|
||||
self.contents = self.file.read()
|
||||
if len(self.contents) < 32:
|
||||
raise InvalidPlistException("File is too short.")
|
||||
trailerContents = self.contents[-32:]
|
||||
try:
|
||||
self.trailer = PlistTrailer._make(unpack("!xxxxxxBBQQQ", trailerContents))
|
||||
|
||||
if pow(2, self.trailer.offsetSize*8) < self.trailer.offsetTableOffset:
|
||||
raise InvalidPlistException("Offset size insufficient to reference all objects.")
|
||||
|
||||
if pow(2, self.trailer.objectRefSize*8) < self.trailer.offsetCount:
|
||||
raise InvalidPlistException("Too many offsets to represent in size of object reference representation.")
|
||||
|
||||
offset_size = self.trailer.offsetSize * self.trailer.offsetCount
|
||||
offset = self.trailer.offsetTableOffset
|
||||
|
||||
if offset + offset_size > pow(2, 64):
|
||||
raise InvalidPlistException("Offset table is excessively long.")
|
||||
|
||||
if self.trailer.offsetSize > 16:
|
||||
raise InvalidPlistException("Offset size is greater than maximum integer size.")
|
||||
|
||||
if self.trailer.objectRefSize == 0:
|
||||
raise InvalidPlistException("Object reference size is zero.")
|
||||
|
||||
if offset >= len(self.contents) - 32:
|
||||
raise InvalidPlistException("Offset table offset is too large.")
|
||||
|
||||
if offset < len("bplist00x"):
|
||||
raise InvalidPlistException("Offset table offset is too small.")
|
||||
|
||||
if self.trailer.topLevelObjectNumber >= self.trailer.offsetCount:
|
||||
raise InvalidPlistException("Top level object number is larger than the number of objects.")
|
||||
|
||||
offset_contents = self.contents[offset:offset+offset_size]
|
||||
offset_i = 0
|
||||
offset_table_length = len(offset_contents)
|
||||
|
||||
while offset_i < self.trailer.offsetCount:
|
||||
begin = self.trailer.offsetSize*offset_i
|
||||
end = begin+self.trailer.offsetSize
|
||||
if end > offset_table_length:
|
||||
raise InvalidPlistException("End of object is at invalid offset %d in offset table of length %d" % (end, offset_table_length))
|
||||
tmp_contents = offset_contents[begin:end]
|
||||
tmp_sized = self.getSizedInteger(tmp_contents, self.trailer.offsetSize)
|
||||
self.offsets.append(tmp_sized)
|
||||
offset_i += 1
|
||||
self.setCurrentOffsetToObjectNumber(self.trailer.topLevelObjectNumber)
|
||||
result = self.readObject()
|
||||
except TypeError as e:
|
||||
raise InvalidPlistException(e)
|
||||
return result
|
||||
|
||||
def setCurrentOffsetToObjectNumber(self, objectNumber):
|
||||
if objectNumber > len(self.offsets) - 1:
|
||||
raise InvalidPlistException("Invalid offset number: %d" % objectNumber)
|
||||
self.currentOffset = self.offsets[objectNumber]
|
||||
if self.currentOffset in self.offsetsStack:
|
||||
raise InvalidPlistException("Recursive data structure detected in object: %d" % objectNumber)
|
||||
|
||||
def beginOffsetProtection(self):
|
||||
self.offsetsStack.append(self.currentOffset)
|
||||
return self.currentOffset
|
||||
|
||||
def endOffsetProtection(self, offset):
|
||||
try:
|
||||
index = self.offsetsStack.index(offset)
|
||||
self.offsetsStack = self.offsetsStack[:index]
|
||||
except ValueError as e:
|
||||
pass
|
||||
|
||||
def readObject(self):
|
||||
protection = self.beginOffsetProtection()
|
||||
result = None
|
||||
tmp_byte = self.contents[self.currentOffset:self.currentOffset+1]
|
||||
if len(tmp_byte) != 1:
|
||||
raise InvalidPlistException("No object found at offset: %d" % self.currentOffset)
|
||||
marker_byte = unpack("!B", tmp_byte)[0]
|
||||
format = (marker_byte >> 4) & 0x0f
|
||||
extra = marker_byte & 0x0f
|
||||
self.currentOffset += 1
|
||||
|
||||
def proc_extra(extra):
|
||||
if extra == 0b1111:
|
||||
extra = self.readObject()
|
||||
return extra
|
||||
|
||||
# bool, null, or fill byte
|
||||
if format == 0b0000:
|
||||
if extra == 0b0000:
|
||||
result = None
|
||||
elif extra == 0b1000:
|
||||
result = False
|
||||
elif extra == 0b1001:
|
||||
result = True
|
||||
elif extra == 0b1111:
|
||||
pass # fill byte
|
||||
else:
|
||||
raise InvalidPlistException("Invalid object found at offset: %d" % (self.currentOffset - 1))
|
||||
# int
|
||||
elif format == 0b0001:
|
||||
result = self.readInteger(pow(2, extra))
|
||||
# real
|
||||
elif format == 0b0010:
|
||||
result = self.readReal(extra)
|
||||
# date
|
||||
elif format == 0b0011 and extra == 0b0011:
|
||||
result = self.readDate()
|
||||
# data
|
||||
elif format == 0b0100:
|
||||
extra = proc_extra(extra)
|
||||
result = self.readData(extra)
|
||||
# ascii string
|
||||
elif format == 0b0101:
|
||||
extra = proc_extra(extra)
|
||||
result = self.readAsciiString(extra)
|
||||
# Unicode string
|
||||
elif format == 0b0110:
|
||||
extra = proc_extra(extra)
|
||||
result = self.readUnicode(extra)
|
||||
# uid
|
||||
elif format == 0b1000:
|
||||
result = self.readUid(extra)
|
||||
# array
|
||||
elif format == 0b1010:
|
||||
extra = proc_extra(extra)
|
||||
result = self.readArray(extra)
|
||||
# set
|
||||
elif format == 0b1100:
|
||||
extra = proc_extra(extra)
|
||||
result = set(self.readArray(extra))
|
||||
# dict
|
||||
elif format == 0b1101:
|
||||
extra = proc_extra(extra)
|
||||
result = self.readDict(extra)
|
||||
else:
|
||||
raise InvalidPlistException("Invalid object found: {format: %s, extra: %s}" % (bin(format), bin(extra)))
|
||||
self.endOffsetProtection(protection)
|
||||
return result
|
||||
|
||||
def readContents(self, length, description="Object contents"):
|
||||
end = self.currentOffset + length
|
||||
if end >= len(self.contents) - 32:
|
||||
raise InvalidPlistException("%s extends into trailer" % description)
|
||||
elif length < 0:
|
||||
raise InvalidPlistException("%s length is less than zero" % length)
|
||||
data = self.contents[self.currentOffset:end]
|
||||
return data
|
||||
|
||||
def readInteger(self, byteSize):
|
||||
data = self.readContents(byteSize, "Integer")
|
||||
self.currentOffset = self.currentOffset + byteSize
|
||||
return self.getSizedInteger(data, byteSize, as_number=True)
|
||||
|
||||
def readReal(self, length):
|
||||
to_read = pow(2, length)
|
||||
data = self.readContents(to_read, "Real")
|
||||
if length == 2: # 4 bytes
|
||||
result = unpack('>f', data)[0]
|
||||
elif length == 3: # 8 bytes
|
||||
result = unpack('>d', data)[0]
|
||||
else:
|
||||
raise InvalidPlistException("Unknown Real of length %d bytes" % to_read)
|
||||
return result
|
||||
|
||||
def readRefs(self, count):
|
||||
refs = []
|
||||
i = 0
|
||||
while i < count:
|
||||
fragment = self.readContents(self.trailer.objectRefSize, "Object reference")
|
||||
ref = self.getSizedInteger(fragment, len(fragment))
|
||||
refs.append(ref)
|
||||
self.currentOffset += self.trailer.objectRefSize
|
||||
i += 1
|
||||
return refs
|
||||
|
||||
def readArray(self, count):
|
||||
if not isinstance(count, (int, long)):
|
||||
raise InvalidPlistException("Count of entries in dict isn't of integer type.")
|
||||
result = []
|
||||
values = self.readRefs(count)
|
||||
i = 0
|
||||
while i < len(values):
|
||||
self.setCurrentOffsetToObjectNumber(values[i])
|
||||
value = self.readObject()
|
||||
result.append(value)
|
||||
i += 1
|
||||
return result
|
||||
|
||||
def readDict(self, count):
|
||||
if not isinstance(count, (int, long)):
|
||||
raise InvalidPlistException("Count of keys/values in dict isn't of integer type.")
|
||||
result = {}
|
||||
keys = self.readRefs(count)
|
||||
values = self.readRefs(count)
|
||||
i = 0
|
||||
while i < len(keys):
|
||||
self.setCurrentOffsetToObjectNumber(keys[i])
|
||||
key = self.readObject()
|
||||
self.setCurrentOffsetToObjectNumber(values[i])
|
||||
value = self.readObject()
|
||||
result[key] = value
|
||||
i += 1
|
||||
return result
|
||||
|
||||
def readAsciiString(self, length):
|
||||
if not isinstance(length, (int, long)):
|
||||
raise InvalidPlistException("Length of ASCII string isn't of integer type.")
|
||||
data = self.readContents(length, "ASCII string")
|
||||
result = unpack("!%ds" % length, data)[0]
|
||||
self.currentOffset += length
|
||||
return str(result.decode('ascii'))
|
||||
|
||||
def readUnicode(self, length):
|
||||
if not isinstance(length, (int, long)):
|
||||
raise InvalidPlistException("Length of Unicode string isn't of integer type.")
|
||||
actual_length = length*2
|
||||
data = self.readContents(actual_length, "Unicode string")
|
||||
self.currentOffset += actual_length
|
||||
return data.decode('utf_16_be')
|
||||
|
||||
def readDate(self):
|
||||
data = self.readContents(8, "Date")
|
||||
x = unpack(">d", data)[0]
|
||||
if math.isnan(x):
|
||||
raise InvalidPlistException("Date is NaN")
|
||||
# Use timedelta to workaround time_t size limitation on 32-bit python.
|
||||
try:
|
||||
result = datetime.timedelta(seconds=x) + apple_reference_date
|
||||
except OverflowError:
|
||||
if x > 0:
|
||||
result = datetime.datetime.max
|
||||
else:
|
||||
result = datetime.datetime.min
|
||||
self.currentOffset += 8
|
||||
return result
|
||||
|
||||
def readData(self, length):
|
||||
if not isinstance(length, (int, long)):
|
||||
raise InvalidPlistException("Length of data isn't of integer type.")
|
||||
result = self.readContents(length, "Data")
|
||||
self.currentOffset += length
|
||||
return Data(result)
|
||||
|
||||
def readUid(self, length):
|
||||
if not isinstance(length, (int, long)):
|
||||
raise InvalidPlistException("Uid length isn't of integer type.")
|
||||
return Uid(self.readInteger(length+1))
|
||||
|
||||
def getSizedInteger(self, data, byteSize, as_number=False):
|
||||
"""Numbers of 8 bytes are signed integers when they refer to numbers, but unsigned otherwise."""
|
||||
result = 0
|
||||
if byteSize == 0:
|
||||
raise InvalidPlistException("Encountered integer with byte size of 0.")
|
||||
# 1, 2, and 4 byte integers are unsigned
|
||||
elif byteSize == 1:
|
||||
result = unpack('>B', data)[0]
|
||||
elif byteSize == 2:
|
||||
result = unpack('>H', data)[0]
|
||||
elif byteSize == 4:
|
||||
result = unpack('>L', data)[0]
|
||||
elif byteSize == 8:
|
||||
if as_number:
|
||||
result = unpack('>q', data)[0]
|
||||
else:
|
||||
result = unpack('>Q', data)[0]
|
||||
elif byteSize <= 16:
|
||||
# Handle odd-sized or integers larger than 8 bytes
|
||||
# Don't naively go over 16 bytes, in order to prevent infinite loops.
|
||||
result = 0
|
||||
if hasattr(int, 'from_bytes'):
|
||||
result = int.from_bytes(data, 'big')
|
||||
else:
|
||||
for byte in data:
|
||||
if not isinstance(byte, int): # Python3.0-3.1.x return ints, 2.x return str
|
||||
byte = unpack_from('>B', byte)[0]
|
||||
result = (result << 8) | byte
|
||||
else:
|
||||
raise InvalidPlistException("Encountered integer longer than 16 bytes.")
|
||||
return result
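# Example (illustration only): each common width maps to a struct code,
# e.g. unpack('>H', b'\x01\x00')[0] == 256; an 8-byte value is read
# signed ('>q') only when it backs a plist <integer> (as_number=True),
# while offsets and counts stay unsigned ('>Q').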
|
||||
|
||||
class HashableWrapper(object):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
def __repr__(self):
|
||||
return "<HashableWrapper: %s>" % [self.value]
|
||||
|
||||
class BoolWrapper(object):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
def __repr__(self):
|
||||
return "<BoolWrapper: %s>" % self.value
|
||||
|
||||
class FloatWrapper(object):
|
||||
_instances = {}
|
||||
def __new__(klass, value):
|
||||
# Ensure FloatWrapper(x) for a given float x is always the same object
|
||||
wrapper = klass._instances.get(value)
|
||||
if wrapper is None:
|
||||
wrapper = object.__new__(klass)
|
||||
wrapper.value = value
|
||||
klass._instances[value] = wrapper
|
||||
return wrapper
|
||||
def __repr__(self):
|
||||
return "<FloatWrapper: %s>" % self.value
|
||||
|
||||
class StringWrapper(object):
|
||||
__instances = {}
|
||||
|
||||
encodedValue = None
|
||||
encoding = None
|
||||
|
||||
def __new__(cls, value):
|
||||
'''Ensure we only have one instance for any string,
and that we encode ascii as 1 byte per character when possible'''
|
||||
|
||||
encodedValue = None
|
||||
|
||||
for encoding in ('ascii', 'utf_16_be'):
|
||||
try:
|
||||
encodedValue = value.encode(encoding)
|
||||
except: pass
|
||||
if encodedValue is not None:
|
||||
if encodedValue not in cls.__instances:
|
||||
cls.__instances[encodedValue] = super(StringWrapper, cls).__new__(cls)
|
||||
cls.__instances[encodedValue].encodedValue = encodedValue
|
||||
cls.__instances[encodedValue].encoding = encoding
|
||||
return cls.__instances[encodedValue]
|
||||
|
||||
raise ValueError('Unable to get ascii or utf_16_be encoding for %s' % repr(value))
|
||||
|
||||
def __len__(self):
'''Return the number of characters in this string (for UTF-16, half the byte length)'''
if self.encoding == 'ascii':
return len(self.encodedValue)
else:
return len(self.encodedValue)//2
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.encodedValue < other.encodedValue
|
||||
|
||||
@property
|
||||
def encodingMarker(self):
|
||||
if self.encoding == 'ascii':
|
||||
return 0b0101
|
||||
else:
|
||||
return 0b0110
|
||||
|
||||
def __repr__(self):
|
||||
return '<StringWrapper (%s): %s>' % (self.encoding, self.encodedValue)
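# Encoding sketch (added for illustration): StringWrapper('abc') keeps
# b'abc' as 'ascii' (marker 0b0101), while StringWrapper('h\xe9llo')
# falls through to 'utf_16_be' (marker 0b0110) after the ascii encode
# raises.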
|
||||
|
||||
class PlistWriter(object):
|
||||
header = b'bplist00bybiplist1.0'
|
||||
file = None
|
||||
byteCounts = None
|
||||
trailer = None
|
||||
computedUniques = None
|
||||
writtenReferences = None
|
||||
referencePositions = None
|
||||
wrappedTrue = None
|
||||
wrappedFalse = None
|
||||
# Used to detect recursive object references.
|
||||
objectsStack = []
|
||||
|
||||
def __init__(self, file):
|
||||
self.reset()
|
||||
self.file = file
|
||||
self.wrappedTrue = BoolWrapper(True)
|
||||
self.wrappedFalse = BoolWrapper(False)
|
||||
|
||||
def reset(self):
|
||||
self.byteCounts = PlistByteCounts(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
|
||||
self.trailer = PlistTrailer(0, 0, 0, 0, 0)
|
||||
|
||||
# A set of all the uniques which have been computed.
|
||||
self.computedUniques = set()
|
||||
# A list of all the uniques which have been written.
|
||||
self.writtenReferences = {}
|
||||
# A dict of the positions of the written uniques.
|
||||
self.referencePositions = {}
|
||||
|
||||
self.objectsStack = []
|
||||
|
||||
def positionOfObjectReference(self, obj):
|
||||
"""If the given object has been written already, return its
|
||||
position in the offset table. Otherwise, return None."""
|
||||
return self.writtenReferences.get(obj)
|
||||
|
||||
def writeRoot(self, root):
|
||||
"""
|
||||
Strategy is:
|
||||
- write header
|
||||
- wrap root object so everything is hashable
|
||||
- compute size of objects which will be written
|
||||
- need to do this in order to know how large the object refs
|
||||
will be in the list/dict/set reference lists
|
||||
- write objects
|
||||
- keep objects in writtenReferences
|
||||
- keep positions of object references in referencePositions
|
||||
- write object references with the length computed previously
|
||||
- compute object reference length
|
||||
- write object reference positions
|
||||
- write trailer
|
||||
"""
|
||||
output = self.header
|
||||
wrapped_root = self.wrapRoot(root)
|
||||
self.computeOffsets(wrapped_root, asReference=True, isRoot=True)
|
||||
self.trailer = self.trailer._replace(**{'objectRefSize':self.intSize(len(self.computedUniques))})
|
||||
self.writeObjectReference(wrapped_root, output)
|
||||
output = self.writeObject(wrapped_root, output, setReferencePosition=True)
|
||||
|
||||
# output size at this point is an upper bound on how big the
|
||||
# object reference offsets need to be.
|
||||
self.trailer = self.trailer._replace(**{
|
||||
'offsetSize':self.intSize(len(output)),
|
||||
'offsetCount':len(self.computedUniques),
|
||||
'offsetTableOffset':len(output),
|
||||
'topLevelObjectNumber':0
|
||||
})
|
||||
|
||||
output = self.writeOffsetTable(output)
|
||||
output += pack('!xxxxxxBBQQQ', *self.trailer)
|
||||
self.file.write(output)
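# Usage sketch (hypothetical buffer, added for illustration):
#
#     import io
#     buf = io.BytesIO()
#     PlistWriter(buf).writeRoot({'a': 1})
#     assert buf.getvalue()[:8] == b'bplist00'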
|
||||
|
||||
def beginRecursionProtection(self, obj):
|
||||
if not isinstance(obj, (set, dict, list, tuple)):
|
||||
return
|
||||
if id(obj) in self.objectsStack:
|
||||
raise InvalidPlistException("Recursive containers are not allowed in plists.")
|
||||
self.objectsStack.append(id(obj))
|
||||
|
||||
def endRecursionProtection(self, obj):
|
||||
if not isinstance(obj, (set, dict, list, tuple)):
|
||||
return
|
||||
try:
|
||||
index = self.objectsStack.index(id(obj))
|
||||
self.objectsStack = self.objectsStack[:index]
|
||||
except ValueError:
pass
|
||||
|
||||
def wrapRoot(self, root):
|
||||
result = None
|
||||
self.beginRecursionProtection(root)
|
||||
|
||||
if isinstance(root, bool):
|
||||
if root is True:
|
||||
result = self.wrappedTrue
|
||||
else:
|
||||
result = self.wrappedFalse
|
||||
elif isinstance(root, float):
|
||||
result = FloatWrapper(root)
|
||||
elif isinstance(root, set):
|
||||
n = set()
|
||||
for value in root:
|
||||
n.add(self.wrapRoot(value))
|
||||
result = HashableWrapper(n)
|
||||
elif isinstance(root, dict):
|
||||
n = {}
|
||||
for key, value in iteritems(root):
|
||||
n[self.wrapRoot(key)] = self.wrapRoot(value)
|
||||
result = HashableWrapper(n)
|
||||
elif isinstance(root, list):
|
||||
n = []
|
||||
for value in root:
|
||||
n.append(self.wrapRoot(value))
|
||||
result = HashableWrapper(n)
|
||||
elif isinstance(root, tuple):
|
||||
n = tuple([self.wrapRoot(value) for value in root])
|
||||
result = HashableWrapper(n)
|
||||
elif isinstance(root, (str, unicode)) and not isinstance(root, Data):
|
||||
result = StringWrapper(root)
|
||||
elif isinstance(root, bytes):
|
||||
result = Data(root)
|
||||
else:
|
||||
result = root
|
||||
|
||||
self.endRecursionProtection(root)
|
||||
return result
|
||||
|
||||
def incrementByteCount(self, field, incr=1):
|
||||
self.byteCounts = self.byteCounts._replace(**{field: getattr(self.byteCounts, field) + incr})
|
||||
|
||||
def computeOffsets(self, obj, asReference=False, isRoot=False):
|
||||
def check_key(key):
|
||||
if key is None:
|
||||
raise InvalidPlistException('Dictionary keys cannot be null in plists.')
|
||||
elif isinstance(key, Data):
|
||||
raise InvalidPlistException('Data cannot be dictionary keys in plists.')
|
||||
elif not isinstance(key, StringWrapper):
|
||||
raise InvalidPlistException('Keys must be strings.')
|
||||
|
||||
def proc_size(size):
|
||||
if size > 0b1110:
|
||||
size += self.intSize(size)
|
||||
return size
|
||||
# If this should be a reference, then we keep a record of it in the
|
||||
# uniques table.
|
||||
if asReference:
|
||||
if obj in self.computedUniques:
|
||||
return
|
||||
else:
|
||||
self.computedUniques.add(obj)
|
||||
|
||||
if obj is None:
|
||||
self.incrementByteCount('nullBytes')
|
||||
elif isinstance(obj, BoolWrapper):
|
||||
self.incrementByteCount('boolBytes')
|
||||
elif isinstance(obj, Uid):
|
||||
size = self.intSize(obj.integer)
|
||||
self.incrementByteCount('uidBytes', incr=1+size)
|
||||
elif isinstance(obj, (int, long)):
|
||||
size = self.intSize(obj)
|
||||
self.incrementByteCount('intBytes', incr=1+size)
|
||||
elif isinstance(obj, FloatWrapper):
|
||||
size = self.realSize(obj)
|
||||
self.incrementByteCount('realBytes', incr=1+size)
|
||||
elif isinstance(obj, datetime.datetime):
|
||||
self.incrementByteCount('dateBytes', incr=2)
|
||||
elif isinstance(obj, Data):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('dataBytes', incr=1+size)
|
||||
elif isinstance(obj, StringWrapper):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('stringBytes', incr=1+size)
|
||||
elif isinstance(obj, HashableWrapper):
|
||||
obj = obj.value
|
||||
if isinstance(obj, set):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('setBytes', incr=1+size)
|
||||
for value in obj:
|
||||
self.computeOffsets(value, asReference=True)
|
||||
elif isinstance(obj, (list, tuple)):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('arrayBytes', incr=1+size)
|
||||
for value in obj:
self.computeOffsets(value, asReference=True)
|
||||
elif isinstance(obj, dict):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('dictBytes', incr=1+size)
|
||||
for key, value in iteritems(obj):
|
||||
check_key(key)
|
||||
self.computeOffsets(key, asReference=True)
|
||||
self.computeOffsets(value, asReference=True)
|
||||
else:
|
||||
raise InvalidPlistException("Unknown object type: %s (%s)" % (type(obj).__name__, repr(obj)))
|
||||
|
||||
def writeObjectReference(self, obj, output):
|
||||
"""Tries to write an object reference, adding it to the references
|
||||
table. Does not write the actual object bytes or set the reference
|
||||
position. Returns a tuple of whether the object was a new reference
|
||||
(True if it was, False if it already was in the reference table)
|
||||
and the new output.
|
||||
"""
|
||||
position = self.positionOfObjectReference(obj)
|
||||
if position is None:
|
||||
self.writtenReferences[obj] = len(self.writtenReferences)
|
||||
output += self.binaryInt(len(self.writtenReferences) - 1, byteSize=self.trailer.objectRefSize)
|
||||
return (True, output)
|
||||
else:
|
||||
output += self.binaryInt(position, byteSize=self.trailer.objectRefSize)
|
||||
return (False, output)
|
||||
|
||||
def writeObject(self, obj, output, setReferencePosition=False):
|
||||
"""Serializes the given object to the output. Returns output.
|
||||
If setReferencePosition is True, will set the position the
|
||||
object was written.
|
||||
"""
|
||||
def proc_variable_length(format, length):
|
||||
result = b''
|
||||
if length > 0b1110:
|
||||
result += pack('!B', (format << 4) | 0b1111)
|
||||
result = self.writeObject(length, result)
|
||||
else:
|
||||
result += pack('!B', (format << 4) | length)
|
||||
return result
|
||||
|
||||
def timedelta_total_seconds(td):
|
||||
# Shim for Python 2.6 compatibility, which doesn't have total_seconds.
|
||||
# Make one argument a float to ensure the right calculation.
|
||||
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10.0**6) / 10.0**6
|
||||
|
||||
if setReferencePosition:
|
||||
self.referencePositions[obj] = len(output)
|
||||
|
||||
if obj is None:
|
||||
output += pack('!B', 0b00000000)
|
||||
elif isinstance(obj, BoolWrapper):
|
||||
if obj.value is False:
|
||||
output += pack('!B', 0b00001000)
|
||||
else:
|
||||
output += pack('!B', 0b00001001)
|
||||
elif isinstance(obj, Uid):
|
||||
size = self.intSize(obj.integer)
|
||||
output += pack('!B', (0b1000 << 4) | (size - 1))
|
||||
output += self.binaryInt(obj.integer)
|
||||
elif isinstance(obj, (int, long)):
|
||||
byteSize = self.intSize(obj)
|
||||
root = math.log(byteSize, 2)
|
||||
output += pack('!B', (0b0001 << 4) | int(root))
|
||||
output += self.binaryInt(obj, as_number=True)
|
||||
elif isinstance(obj, FloatWrapper):
|
||||
# just use doubles
|
||||
output += pack('!B', (0b0010 << 4) | 3)
|
||||
output += self.binaryReal(obj)
|
||||
elif isinstance(obj, datetime.datetime):
|
||||
try:
|
||||
timestamp = (obj - apple_reference_date).total_seconds()
|
||||
except AttributeError:
|
||||
timestamp = timedelta_total_seconds(obj - apple_reference_date)
|
||||
output += pack('!B', 0b00110011)
|
||||
output += pack('!d', float(timestamp))
|
||||
elif isinstance(obj, Data):
|
||||
output += proc_variable_length(0b0100, len(obj))
|
||||
output += obj
|
||||
elif isinstance(obj, StringWrapper):
|
||||
output += proc_variable_length(obj.encodingMarker, len(obj))
|
||||
output += obj.encodedValue
|
||||
elif isinstance(obj, bytes):
|
||||
output += proc_variable_length(0b0101, len(obj))
|
||||
output += obj
|
||||
elif isinstance(obj, HashableWrapper):
|
||||
obj = obj.value
|
||||
if isinstance(obj, (set, list, tuple)):
|
||||
if isinstance(obj, set):
|
||||
output += proc_variable_length(0b1100, len(obj))
|
||||
else:
|
||||
output += proc_variable_length(0b1010, len(obj))
|
||||
|
||||
objectsToWrite = []
|
||||
for objRef in sorted(obj) if isinstance(obj, set) else obj:
|
||||
(isNew, output) = self.writeObjectReference(objRef, output)
|
||||
if isNew:
|
||||
objectsToWrite.append(objRef)
|
||||
for objRef in objectsToWrite:
|
||||
output = self.writeObject(objRef, output, setReferencePosition=True)
|
||||
elif isinstance(obj, dict):
|
||||
output += proc_variable_length(0b1101, len(obj))
|
||||
keys = []
|
||||
values = []
|
||||
objectsToWrite = []
|
||||
for key, value in sorted(iteritems(obj)):
|
||||
keys.append(key)
|
||||
values.append(value)
|
||||
for key in keys:
|
||||
(isNew, output) = self.writeObjectReference(key, output)
|
||||
if isNew:
|
||||
objectsToWrite.append(key)
|
||||
for value in values:
|
||||
(isNew, output) = self.writeObjectReference(value, output)
|
||||
if isNew:
|
||||
objectsToWrite.append(value)
|
||||
for objRef in objectsToWrite:
|
||||
output = self.writeObject(objRef, output, setReferencePosition=True)
|
||||
return output
|
||||
|
||||
def writeOffsetTable(self, output):
|
||||
"""Writes all of the object reference offsets."""
|
||||
all_positions = []
|
||||
writtenReferences = list(self.writtenReferences.items())
|
||||
writtenReferences.sort(key=lambda x: x[1])
|
||||
for obj,order in writtenReferences:
|
||||
# Porting note: Elsewhere we deliberately replace empty unicode strings
|
||||
# with empty binary strings, but the empty unicode string
|
||||
# goes into writtenReferences. This isn't an issue in Py2
|
||||
# because u'' and b'' have the same hash; but it is in
|
||||
# Py3, where they don't.
|
||||
if bytes != str and obj == unicodeEmpty:
|
||||
obj = b''
|
||||
position = self.referencePositions.get(obj)
|
||||
if position is None:
|
||||
raise InvalidPlistException("Error while writing offsets table. Object not found. %s" % obj)
|
||||
output += self.binaryInt(position, self.trailer.offsetSize)
|
||||
all_positions.append(position)
|
||||
return output
|
||||
|
||||
def binaryReal(self, obj):
|
||||
# just use doubles
|
||||
result = pack('>d', obj.value)
|
||||
return result
|
||||
|
||||
def binaryInt(self, obj, byteSize=None, as_number=False):
|
||||
result = b''
|
||||
if byteSize is None:
|
||||
byteSize = self.intSize(obj)
|
||||
if byteSize == 1:
|
||||
result += pack('>B', obj)
|
||||
elif byteSize == 2:
|
||||
result += pack('>H', obj)
|
||||
elif byteSize == 4:
|
||||
result += pack('>L', obj)
|
||||
elif byteSize == 8:
|
||||
if as_number:
|
||||
result += pack('>q', obj)
|
||||
else:
|
||||
result += pack('>Q', obj)
|
||||
elif byteSize <= 16:
|
||||
try:
|
||||
result = pack('>Q', 0) + pack('>Q', obj)
|
||||
except struct_error as e:
|
||||
raise InvalidPlistException("Unable to pack integer %d: %s" % (obj, e))
|
||||
else:
|
||||
raise InvalidPlistException("Core Foundation can't handle integers with size greater than 16 bytes.")
|
||||
return result
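# Example (illustration only): binaryInt(256) yields b'\x01\x00', and a
# negative plist integer is always 8 signed bytes, e.g.
# binaryInt(-1, as_number=True) == b'\xff' * 8.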
|
||||
|
||||
def intSize(self, obj):
|
||||
"""Returns the number of bytes necessary to store the given integer."""
|
||||
# SIGNED
|
||||
if obj < 0: # Signed integer, always 8 bytes
|
||||
return 8
|
||||
# UNSIGNED
|
||||
elif obj <= 0xFF: # 1 byte
|
||||
return 1
|
||||
elif obj <= 0xFFFF: # 2 bytes
|
||||
return 2
|
||||
elif obj <= 0xFFFFFFFF: # 4 bytes
|
||||
return 4
|
||||
# SIGNED
|
||||
# 0x7FFFFFFFFFFFFFFF is the max.
|
||||
elif obj <= 0x7FFFFFFFFFFFFFFF: # 8 bytes signed
|
||||
return 8
|
||||
elif obj <= 0xffffffffffffffff: # fits in 8 unsigned bytes, but must be stored as 16 bytes signed
|
||||
return 16
|
||||
else:
|
||||
raise InvalidPlistException("Core Foundation can't handle integers with size greater than 8 bytes.")
|
||||
|
||||
def realSize(self, obj):
|
||||
return 8
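# Boundary sketch (comment added for clarity): intSize(255) == 1,
# intSize(256) == 2, intSize(-1) == 8, and anything above 2**63 - 1 but
# still within unsigned 64-bit range reports 16, since Core Foundation
# keeps such values as 128-bit signed integers.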
|
||||
143
mc_test/node_modules/dmg-builder/vendor/dmgbuild/badge.py
generated
vendored
Executable file
@ -0,0 +1,143 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from Quartz import *
|
||||
import math
|
||||
|
||||
_REMOVABLE_DISK_PATH = '/System/Library/Extensions/IOStorageFamily.kext/Contents/Resources/Removable.icns'
|
||||
|
||||
def badge_disk_icon(badge_file, output_file):
|
||||
# Load the Removable disk icon
|
||||
url = CFURLCreateWithFileSystemPath(None, _REMOVABLE_DISK_PATH,
|
||||
kCFURLPOSIXPathStyle, False)
|
||||
backdrop = CGImageSourceCreateWithURL(url, None)
|
||||
backdropCount = CGImageSourceGetCount(backdrop)
|
||||
|
||||
# Load the badge
|
||||
url = CFURLCreateWithFileSystemPath(None, badge_file,
|
||||
kCFURLPOSIXPathStyle, False)
|
||||
badge = CGImageSourceCreateWithURL(url, None)
|
||||
assert badge is not None, 'Unable to process image file: %s' % badge_file
|
||||
badgeCount = CGImageSourceGetCount(badge)
|
||||
|
||||
# Set up a destination for our target
|
||||
url = CFURLCreateWithFileSystemPath(None, output_file,
|
||||
kCFURLPOSIXPathStyle, False)
|
||||
target = CGImageDestinationCreateWithURL(url, 'com.apple.icns',
|
||||
backdropCount, None)
|
||||
|
||||
# Get the RGB colorspace
|
||||
rgbColorSpace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB)
|
||||
|
||||
# Scale
|
||||
scale = 1.0
|
||||
|
||||
# Perspective transform
|
||||
corners = ((0.2, 0.95), (0.8, 0.95), (0.85, 0.35), (0.15, 0.35))
|
||||
|
||||
# Translation
|
||||
position = (0.5, 0.5)
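# Note (added for clarity): corners and position are fractions of the
# (square) backdrop icon; they are multiplied by width * scale below,
# so (0.5, 0.5) centres the badge and the corner list applies the
# perspective tilt onto the disk face.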
|
||||
|
||||
for n in range(backdropCount):
|
||||
props = CGImageSourceCopyPropertiesAtIndex(backdrop, n, None)
|
||||
width = props['PixelWidth']
|
||||
height = props['PixelHeight']
|
||||
dpi = props['DPIWidth']
|
||||
depth = props['Depth']
|
||||
|
||||
# Choose the best sized badge image
|
||||
bestWidth = None
|
||||
bestHeight = None
|
||||
bestBadge = None
|
||||
bestDepth = None
|
||||
bestDPI = None
|
||||
for m in range(badgeCount):
|
||||
badgeProps = CGImageSourceCopyPropertiesAtIndex(badge, m, None)
|
||||
badgeWidth = badgeProps['PixelWidth']
|
||||
badgeHeight = badgeProps['PixelHeight']
|
||||
badgeDPI = badgeProps['DPIWidth']
|
||||
badgeDepth = badgeProps['Depth']
|
||||
|
||||
if bestBadge is None or (badgeWidth <= width
|
||||
and (bestWidth > width
|
||||
or badgeWidth > bestWidth
|
||||
or (badgeWidth == bestWidth
|
||||
and badgeDPI == dpi
|
||||
and badgeDepth <= depth
|
||||
and (bestDepth is None
|
||||
or badgeDepth > bestDepth)))):
|
||||
bestBadge = m
|
||||
bestWidth = badgeWidth
|
||||
bestHeight = badgeHeight
|
||||
bestDPI = badgeDPI
|
||||
bestDepth = badgeDepth
|
||||
|
||||
badgeImage = CGImageSourceCreateImageAtIndex(badge, bestBadge, None)
|
||||
badgeCI = CIImage.imageWithCGImage_(badgeImage)
|
||||
|
||||
backgroundImage = CGImageSourceCreateImageAtIndex(backdrop, n, None)
|
||||
backgroundCI = CIImage.imageWithCGImage_(backgroundImage)
|
||||
|
||||
compositor = CIFilter.filterWithName_('CISourceOverCompositing')
|
||||
lanczos = CIFilter.filterWithName_('CILanczosScaleTransform')
|
||||
perspective = CIFilter.filterWithName_('CIPerspectiveTransform')
|
||||
transform = CIFilter.filterWithName_('CIAffineTransform')
|
||||
|
||||
lanczos.setValue_forKey_(badgeCI, kCIInputImageKey)
|
||||
lanczos.setValue_forKey_(scale * float(width)/bestWidth, kCIInputScaleKey)
|
||||
lanczos.setValue_forKey_(1.0, kCIInputAspectRatioKey)
|
||||
|
||||
topLeft = (width * scale * corners[0][0],
|
||||
width * scale * corners[0][1])
|
||||
topRight = (width * scale * corners[1][0],
|
||||
width * scale * corners[1][1])
|
||||
bottomRight = (width * scale * corners[2][0],
|
||||
width * scale * corners[2][1])
|
||||
bottomLeft = (width * scale * corners[3][0],
|
||||
width * scale * corners[3][1])
|
||||
|
||||
out = lanczos.valueForKey_(kCIOutputImageKey)
|
||||
if width >= 16:
|
||||
perspective.setValue_forKey_(out, kCIInputImageKey)
|
||||
perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*topLeft),
|
||||
'inputTopLeft')
|
||||
perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*topRight),
|
||||
'inputTopRight')
|
||||
perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*bottomRight),
|
||||
'inputBottomRight')
|
||||
perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*bottomLeft),
|
||||
'inputBottomLeft')
|
||||
out = perspective.valueForKey_(kCIOutputImageKey)
|
||||
|
||||
tfm = NSAffineTransform.transform()
|
||||
tfm.translateXBy_yBy_(math.floor((position[0] - 0.5 * scale) * width),
|
||||
math.floor((position[1] - 0.5 * scale) * height))
|
||||
|
||||
transform.setValue_forKey_(out, kCIInputImageKey)
|
||||
transform.setValue_forKey_(tfm, 'inputTransform')
|
||||
out = transform.valueForKey_(kCIOutputImageKey)
|
||||
|
||||
compositor.setValue_forKey_(out, kCIInputImageKey)
|
||||
compositor.setValue_forKey_(backgroundCI, kCIInputBackgroundImageKey)
|
||||
|
||||
result = compositor.valueForKey_(kCIOutputImageKey)
|
||||
|
||||
cgContext = CGBitmapContextCreate(None,
|
||||
width,
|
||||
height,
|
||||
8,
|
||||
0,
|
||||
rgbColorSpace,
|
||||
kCGImageAlphaPremultipliedLast)
|
||||
context = CIContext.contextWithCGContext_options_(cgContext, None)
|
||||
|
||||
context.drawImage_inRect_fromRect_(result,
|
||||
((0, 0), (width, height)),
|
||||
((0, 0), (width, height)))
|
||||
|
||||
image = CGBitmapContextCreateImage(cgContext)
|
||||
|
||||
CGImageDestinationAddImage(target, image, props)
|
||||
|
||||
CGImageDestinationFinalize(target)
|
||||
|
||||
494
mc_test/node_modules/dmg-builder/vendor/dmgbuild/colors.py
generated
vendored
Executable file
@ -0,0 +1,494 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import re
|
||||
import math
|
||||
|
||||
class Color (object):
|
||||
def to_rgb(self):
|
||||
raise Exception('Must implement to_rgb() in subclasses')
|
||||
|
||||
class RGB (Color):
|
||||
def __init__(self, r, g, b):
|
||||
self.r = r
|
||||
self.g = g
|
||||
self.b = b
|
||||
|
||||
def to_rgb(self):
|
||||
return self
|
||||
|
||||
class HSL (Color):
|
||||
def __init__(self, h, s, l):
|
||||
self.h = h
|
||||
self.s = s
|
||||
self.l = l
|
||||
|
||||
@staticmethod
|
||||
def _hue_to_rgb(t1, t2, hue):
|
||||
if hue < 0:
|
||||
hue += 6
|
||||
elif hue >= 6:
|
||||
hue -= 6
|
||||
|
||||
if hue < 1:
|
||||
return (t2 - t1) * hue + t1
|
||||
elif hue < 3:
|
||||
return t2
|
||||
elif hue < 4:
|
||||
return (t2 - t1) * (4 - hue) + t1
|
||||
else:
|
||||
return t1
|
||||
|
||||
def to_rgb(self):
|
||||
hue = self.h / 60.0
|
||||
if self.l <= 0.5:
|
||||
t2 = self.l * (self.s + 1)
|
||||
else:
|
||||
t2 = self.l + self.s - (self.l * self.s)
|
||||
t1 = self.l * 2 - t2
|
||||
r = self._hue_to_rgb(t1, t2, hue + 2)
|
||||
g = self._hue_to_rgb(t1, t2, hue)
|
||||
b = self._hue_to_rgb(t1, t2, hue - 2)
|
||||
return RGB(r, g, b)
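# Worked example (added for illustration): hsl(120, 1.0, 0.5) gives
# t2 = 1.0, t1 = 0.0 and hue = 2.0, so r = 0, g = 1, b = 0 -- pure green.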
|
||||
|
||||
class HWB (Color):
|
||||
def __init__(self, h, w, b):
|
||||
self.h = h
|
||||
self.w = w
|
||||
self.b = b
|
||||
|
||||
@staticmethod
|
||||
def _hue_to_rgb(hue):
|
||||
if hue < 0:
|
||||
hue += 6
|
||||
elif hue >= 6:
|
||||
hue -= 6
|
||||
|
||||
if hue < 1:
|
||||
return hue
|
||||
elif hue < 3:
|
||||
return 1
|
||||
elif hue < 4:
|
||||
return (4 - hue)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def to_rgb(self):
|
||||
hue = self.h / 60.0
|
||||
t1 = 1 - self.w - self.b
|
||||
r = self._hue_to_rgb(hue + 2) * t1 + self.w
|
||||
g = self._hue_to_rgb(hue) * t1 + self.w
|
||||
b = self._hue_to_rgb(hue - 2) * t1 + self.w
|
||||
return RGB(r, g, b)
|
||||
|
||||
class CMYK (Color):
|
||||
def __init__(self, c, m, y, k):
|
||||
self.c = c
|
||||
self.m = m
|
||||
self.y = y
|
||||
self.k = k
|
||||
|
||||
def to_rgb(self):
|
||||
r = 1.0 - min(1.0, self.c + self.k)
|
||||
g = 1.0 - min(1.0, self.m + self.k)
|
||||
b = 1.0 - min(1.0, self.y + self.k)
|
||||
return RGB(r, g, b)
|
||||
|
||||
class Gray (Color):
|
||||
def __init__(self, g):
|
||||
self.g = g
|
||||
|
||||
def to_rgb(self):
|
||||
return RGB(g, g, g)
|
||||
|
||||
_x11_colors = {
|
||||
'aliceblue': (240, 248, 255),
|
||||
'antiquewhite': (250, 235, 215),
|
||||
'aqua': ( 0, 255, 255),
|
||||
'aquamarine': (127, 255, 212),
|
||||
'azure': (240, 255, 255),
|
||||
'beige': (245, 245, 220),
|
||||
'bisque': (255, 228, 196),
|
||||
'black': ( 0, 0, 0),
|
||||
'blanchedalmond': (255, 235, 205),
|
||||
'blue': ( 0, 0, 255),
|
||||
'blueviolet': (138, 43, 226),
|
||||
'brown': (165, 42, 42),
|
||||
'burlywood': (222, 184, 135),
|
||||
'cadetblue': ( 95, 158, 160),
|
||||
'chartreuse': (127, 255, 0),
|
||||
'chocolate': (210, 105, 30),
|
||||
'coral': (255, 127, 80),
|
||||
'cornflowerblue': (100, 149, 237),
|
||||
'cornsilk': (255, 248, 220),
|
||||
'crimson': (220, 20, 60),
|
||||
'cyan': ( 0, 255, 255),
|
||||
'darkblue': ( 0, 0, 139),
|
||||
'darkcyan': ( 0, 139, 139),
|
||||
'darkgoldenrod': (184, 134, 11),
|
||||
'darkgray': (169, 169, 169),
|
||||
'darkgreen': ( 0, 100, 0),
|
||||
'darkgrey': (169, 169, 169),
|
||||
'darkkhaki': (189, 183, 107),
|
||||
'darkmagenta': (139, 0, 139),
|
||||
'darkolivegreen': ( 85, 107, 47),
|
||||
'darkorange': (255, 140, 0),
|
||||
'darkorchid': (153, 50, 204),
|
||||
'darkred': (139, 0, 0),
|
||||
'darksalmon': (233, 150, 122),
|
||||
'darkseagreen': (143, 188, 143),
|
||||
'darkslateblue': ( 72, 61, 139),
|
||||
'darkslategray': ( 47, 79, 79),
|
||||
'darkslategrey': ( 47, 79, 79),
|
||||
'darkturquoise': ( 0, 206, 209),
|
||||
'darkviolet': (148, 0, 211),
|
||||
'deeppink': (255, 20, 147),
|
||||
'deepskyblue': ( 0, 191, 255),
|
||||
'dimgray': (105, 105, 105),
|
||||
'dimgrey': (105, 105, 105),
|
||||
'dodgerblue': ( 30, 144, 255),
|
||||
'firebrick': (178, 34, 34),
|
||||
'floralwhite': (255, 250, 240),
|
||||
'forestgreen': ( 34, 139, 34),
|
||||
'fuchsia': (255, 0, 255),
|
||||
'gainsboro': (220, 220, 220),
|
||||
'ghostwhite': (248, 248, 255),
|
||||
'gold': (255, 215, 0),
|
||||
'goldenrod': (218, 165, 32),
|
||||
'gray': (128, 128, 128),
|
||||
'grey': (128, 128, 128),
|
||||
'green': ( 0, 128, 0),
|
||||
'greenyellow': (173, 255, 47),
|
||||
'honeydew': (240, 255, 240),
|
||||
'hotpink': (255, 105, 180),
|
||||
'indianred': (205, 92, 92),
|
||||
'indigo': ( 75, 0, 130),
|
||||
'ivory': (255, 255, 240),
|
||||
'khaki': (240, 230, 140),
|
||||
'lavender': (230, 230, 250),
|
||||
'lavenderblush': (255, 240, 245),
|
||||
'lawngreen': (124, 252, 0),
|
||||
'lemonchiffon': (255, 250, 205),
|
||||
'lightblue': (173, 216, 230),
|
||||
'lightcoral': (240, 128, 128),
|
||||
'lightcyan': (224, 255, 255),
|
||||
'lightgoldenrodyellow': (250, 250, 210),
|
||||
'lightgray': (211, 211, 211),
|
||||
'lightgreen': (144, 238, 144),
|
||||
'lightgrey': (211, 211, 211),
|
||||
'lightpink': (255, 182, 193),
|
||||
'lightsalmon': (255, 160, 122),
|
||||
'lightseagreen': ( 32, 178, 170),
|
||||
'lightskyblue': (135, 206, 250),
|
||||
'lightslategray': (119, 136, 153),
|
||||
'lightslategrey': (119, 136, 153),
|
||||
'lightsteelblue': (176, 196, 222),
|
||||
'lightyellow': (255, 255, 224),
|
||||
'lime': ( 0, 255, 0),
|
||||
'limegreen': ( 50, 205, 50),
|
||||
'linen': (250, 240, 230),
|
||||
'magenta': (255, 0, 255),
|
||||
'maroon': (128, 0, 0),
|
||||
'mediumaquamarine': (102, 205, 170),
|
||||
'mediumblue': ( 0, 0, 205),
|
||||
'mediumorchid': (186, 85, 211),
|
||||
'mediumpurple': (147, 112, 219),
|
||||
'mediumseagreen': ( 60, 179, 113),
|
||||
'mediumslateblue': (123, 104, 238),
|
||||
'mediumspringgreen': ( 0, 250, 154),
|
||||
'mediumturquoise': ( 72, 209, 204),
|
||||
'mediumvioletred': (199, 21, 133),
|
||||
'midnightblue': ( 25, 25, 112),
|
||||
'mintcream': (245, 255, 250),
|
||||
'mistyrose': (255, 228, 225),
|
||||
'moccasin': (255, 228, 181),
|
||||
'navajowhite': (255, 222, 173),
|
||||
'navy': ( 0, 0, 128),
|
||||
'oldlace': (253, 245, 230),
|
||||
'olive': (128, 128, 0),
|
||||
'olivedrab': (107, 142, 35),
|
||||
'orange': (255, 165, 0),
|
||||
'orangered': (255, 69, 0),
|
||||
'orchid': (218, 112, 214),
|
||||
'palegoldenrod': (238, 232, 170),
|
||||
'palegreen': (152, 251, 152),
|
||||
'paleturquoise': (175, 238, 238),
|
||||
'palevioletred': (219, 112, 147),
|
||||
'papayawhip': (255, 239, 213),
|
||||
'peachpuff': (255, 218, 185),
|
||||
'peru': (205, 133, 63),
|
||||
'pink': (255, 192, 203),
|
||||
'plum': (221, 160, 221),
|
||||
'powderblue': (176, 224, 230),
|
||||
'purple': (128, 0, 128),
|
||||
'red': (255, 0, 0),
|
||||
'rosybrown': (188, 143, 143),
|
||||
'royalblue': ( 65, 105, 225),
|
||||
'saddlebrown': (139, 69, 19),
|
||||
'salmon': (250, 128, 114),
|
||||
'sandybrown': (244, 164, 96),
|
||||
'seagreen': ( 46, 139, 87),
|
||||
'seashell': (255, 245, 238),
|
||||
'sienna': (160, 82, 45),
|
||||
'silver': (192, 192, 192),
|
||||
'skyblue': (135, 206, 235),
|
||||
'slateblue': (106, 90, 205),
|
||||
'slategray': (112, 128, 144),
|
||||
'slategrey': (112, 128, 144),
|
||||
'snow': (255, 250, 250),
|
||||
'springgreen': ( 0, 255, 127),
|
||||
'steelblue': ( 70, 130, 180),
|
||||
'tan': (210, 180, 140),
|
||||
'teal': ( 0, 128, 128),
|
||||
'thistle': (216, 191, 216),
|
||||
'tomato': (255, 99, 71),
|
||||
'turquoise': ( 64, 224, 208),
|
||||
'violet': (238, 130, 238),
|
||||
'wheat': (245, 222, 179),
|
||||
'white': (255, 255, 255),
|
||||
'whitesmoke': (245, 245, 245),
|
||||
'yellow': (255, 255, 0),
|
||||
'yellowgreen': (154, 205, 50)
|
||||
}
|
||||
|
||||
_ws_re = re.compile(r'\s+')
_token_re = re.compile('[A-Za-z_][A-Za-z0-9_]*')
_hex_re = re.compile('#([0-9a-f]{3}(?:[0-9a-f]{3})?)$')
_number_re = re.compile(r'[0-9]*(\.[0-9]*)?')
|
||||
|
||||
class ColorParser (object):
|
||||
def __init__(self, s):
|
||||
self._string = s
|
||||
self._pos = 0
|
||||
|
||||
def skipws(self):
|
||||
m = _ws_re.match(self._string, self._pos)
|
||||
if m:
|
||||
self._pos = m.end(0)
|
||||
|
||||
def expect(self, s, context=''):
|
||||
if len(self._string) - self._pos < len(s) \
|
||||
or self._string[self._pos:self._pos + len(s)] != s:
|
||||
raise ValueError('bad color "%s" - expected "%s"%s'
|
||||
% (self._string, s, context))
|
||||
self._pos += len(s)
|
||||
|
||||
def expectEnd(self):
|
||||
if self._pos != len(self._string):
|
||||
raise ValueError('junk at end of color "%s"' % self._string)
|
||||
|
||||
def getToken(self):
|
||||
m = _token_re.match(self._string, self._pos)
|
||||
if m:
|
||||
token = m.group(0)
|
||||
|
||||
self._pos = m.end(0)
|
||||
return token
|
||||
return None
|
||||
|
||||
def parseNumber(self, context=''):
|
||||
m = _number_re.match(self._string, self._pos)
|
||||
if m:
|
||||
self._pos = m.end(0)
|
||||
return float(m.group(0))
|
||||
raise ValueError('bad color "%s" - expected a number%s'
|
||||
% (self._string, context))
|
||||
|
||||
def parseColor(self):
|
||||
self.skipws()
|
||||
|
||||
token = self.getToken()
|
||||
if token:
|
||||
if token == 'rgb':
|
||||
return self.parseRGB()
|
||||
elif token == 'hsl':
|
||||
return self.parseHSL()
|
||||
elif token == 'hwb':
|
||||
return self.parseHWB()
|
||||
elif token == 'cmyk':
|
||||
return self.parseCMYK()
|
||||
elif token == 'gray' or token == 'grey':
|
||||
return self.parseGray()
|
||||
|
||||
try:
|
||||
r, g, b = _x11_colors[token]
|
||||
except KeyError:
|
||||
raise ValueError('unknown color name "%s"' % token)
|
||||
|
||||
self.expectEnd()
|
||||
|
||||
return RGB(r / 255.0, g / 255.0, b / 255.0)
|
||||
|
||||
m = _hex_re.match(self._string, self._pos)
|
||||
if m:
|
||||
hrgb = m.group(1)
|
||||
|
||||
if len(hrgb) == 3:
|
||||
r = int('0x' + 2 * hrgb[0], 16)
|
||||
g = int('0x' + 2 * hrgb[1], 16)
|
||||
b = int('0x' + 2 * hrgb[2], 16)
|
||||
else:
|
||||
r = int('0x' + hrgb[0:2], 16)
|
||||
g = int('0x' + hrgb[2:4], 16)
|
||||
b = int('0x' + hrgb[4:6], 16)
|
||||
|
||||
self._pos = m.end(0)
|
||||
self.skipws()
|
||||
|
||||
self.expectEnd()
|
||||
|
||||
return RGB(r / 255.0, g / 255.0, b / 255.0)
|
||||
|
||||
raise ValueError('bad color syntax "%s"' % self._string)
|
||||
|
||||
def parseRGB(self):
|
||||
self.expect('(', 'after "rgb"')
|
||||
self.skipws()
|
||||
|
||||
r = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "rgb"')
|
||||
self.skipws()
|
||||
|
||||
g = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "rgb"')
|
||||
self.skipws()
|
||||
|
||||
b = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(')', 'at end of "rgb"')
|
||||
|
||||
self.skipws()
|
||||
self.expectEnd()
|
||||
|
||||
return RGB(r, g, b)
|
||||
|
||||
def parseHSL(self):
|
||||
self.expect('(', 'after "hsl"')
|
||||
self.skipws()
|
||||
|
||||
h = self.parseAngle()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "hsl"')
|
||||
self.skipws()
|
||||
|
||||
s = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "hsl"')
|
||||
self.skipws()
|
||||
|
||||
l = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(')', 'at end of "hsl"')
|
||||
|
||||
self.skipws()
|
||||
self.expectEnd()
|
||||
|
||||
return HSL(h, s, l)
|
||||
|
||||
def parseHWB(self):
|
||||
self.expect('(', 'after "hwb"')
|
||||
self.skipws()
|
||||
|
||||
h = self.parseAngle()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "hwb"')
|
||||
self.skipws()
|
||||
|
||||
w = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "hwb"')
|
||||
self.skipws()
|
||||
|
||||
b = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(')', 'at end of "hwb"')
|
||||
|
||||
self.skipws()
|
||||
self.expectEnd()
|
||||
|
||||
return HWB(h, w, b)
|
||||
|
||||
def parseCMYK(self):
|
||||
self.expect('(', 'after "cmyk"')
|
||||
self.skipws()
|
||||
|
||||
c = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "cmyk"')
|
||||
self.skipws()
|
||||
|
||||
m = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "cmyk"')
|
||||
self.skipws()
|
||||
|
||||
y = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(',', 'in "cmyk"')
|
||||
self.skipws()
|
||||
|
||||
k = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(')', 'at end of "cmyk"')
|
||||
|
||||
self.skipws()
|
||||
self.expectEnd()
|
||||
|
||||
return CMYK(c, m, y, k)
|
||||
|
||||
def parseGray(self):
|
||||
self.expect('(', 'after "gray"')
|
||||
self.skipws()
|
||||
|
||||
g = self.parseValue()
|
||||
|
||||
self.skipws()
|
||||
self.expect(')', 'at end of "gray')
|
||||
|
||||
self.skipws()
|
||||
self.expectEnd()
|
||||
|
||||
return Gray(g)
|
||||
|
||||
def parseValue(self):
|
||||
n = self.parseNumber()
|
||||
self.skipws()
|
||||
if self._pos < len(self._string) and self._string[self._pos] == '%':
n = n / 100.0
self._pos += 1
|
||||
return n
|
||||
|
||||
def parseAngle(self):
|
||||
n = self.parseNumber()
|
||||
self.skipws()
|
||||
tok = self.getToken()
|
||||
if tok == 'rad':
|
||||
n = n * 180.0 / math.pi
|
||||
elif tok == 'grad' or tok == 'gon':
|
||||
n = n * 0.9
|
||||
elif tok != 'deg':
|
||||
raise ValueError('bad angle unit "%s"' % tok)
|
||||
return n
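# Example (illustration only): '90deg', '100grad' and '1.5707963rad'
# all come out near 90.0; a missing or unknown unit raises ValueError.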
|
||||
|
||||
_color_re = re.compile(r'\s*(#|rgb|hsl|hwb|cmyk|gray|grey|%s)'
|
||||
% '|'.join(_x11_colors.keys()))
|
||||
def isAColor(s):
|
||||
return _color_re.match(s)
|
||||
|
||||
def parseColor(s):
|
||||
return ColorParser(s).parseColor()
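# Usage sketch (added for illustration):
#     parseColor('#ff0000')       -> RGB(1.0, 0.0, 0.0)
#     parseColor('rgb(1, 0, 0)')  -> RGB(1.0, 0.0, 0.0)  (components are
#                                    0-1 floats, or percentages with '%')
#     parseColor('gray(50%)')     -> Gray(0.5)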
|
||||
290
mc_test/node_modules/dmg-builder/vendor/dmgbuild/core.py
generated
vendored
Executable file
@ -0,0 +1,290 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
if sys.version_info.major == 3:
|
||||
try:
|
||||
from importlib import reload
|
||||
except ImportError:
|
||||
from imp import reload
|
||||
reload(sys) # To work around the unbound issue
|
||||
else:
|
||||
reload(sys) # Reload is a hack
|
||||
sys.setdefaultencoding('UTF8')
|
||||
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), "..")))
|
||||
|
||||
try:
|
||||
{}.iteritems
|
||||
iteritems = lambda x: x.iteritems()
|
||||
iterkeys = lambda x: x.iterkeys()
|
||||
except AttributeError:
|
||||
iteritems = lambda x: x.items()
|
||||
iterkeys = lambda x: x.keys()
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = str
|
||||
|
||||
import biplist
|
||||
from mac_alias import *
|
||||
from ds_store import *
|
||||
|
||||
from colors import parseColor
|
||||
|
||||
try:
|
||||
from badge import badge
|
||||
except ImportError:
|
||||
badge = None
|
||||
|
||||
class DMGError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def build_dmg():
|
||||
options = {
|
||||
'icon': None,
|
||||
'badge_icon': None,
|
||||
'sidebar_width': 180,
|
||||
'arrange_by': None,
|
||||
'grid_offset': (0, 0),
|
||||
'grid_spacing': 100.0,
|
||||
'scroll_position': (0.0, 0.0),
|
||||
'show_icon_preview': False,
|
||||
'text_size': os.environ['iconTextSize'],
|
||||
'icon_size': os.environ['iconSize'],
|
||||
'include_icon_view_settings': 'auto',
|
||||
'include_list_view_settings': 'auto',
|
||||
'list_icon_size': 16.0,
|
||||
'list_text_size': 12.0,
|
||||
'list_scroll_position': (0, 0),
|
||||
'list_sort_by': 'name',
|
||||
'list_columns': ('name', 'date-modified', 'size', 'kind', 'date-added'),
|
||||
'list_column_widths': {
|
||||
'name': 300,
|
||||
'date-modified': 181,
|
||||
'date-created': 181,
|
||||
'date-added': 181,
|
||||
'date-last-opened': 181,
|
||||
'size': 97,
|
||||
'kind': 115,
|
||||
'label': 100,
|
||||
'version': 75,
|
||||
'comments': 300,
|
||||
},
|
||||
'list_column_sort_directions': {
|
||||
'name': 'ascending',
|
||||
'date-modified': 'descending',
|
||||
'date-created': 'descending',
|
||||
'date-added': 'descending',
|
||||
'date-last-opened': 'descending',
|
||||
'size': 'descending',
|
||||
'kind': 'ascending',
|
||||
'label': 'ascending',
|
||||
'version': 'ascending',
|
||||
'comments': 'ascending',
|
||||
}
|
||||
}
|
||||
|
||||
# Set up the finder data
|
||||
bwsp = {
|
||||
'ShowStatusBar': False,
|
||||
'ContainerShowSidebar': False,
|
||||
'PreviewPaneVisibility': False,
|
||||
'SidebarWidth': options['sidebar_width'],
|
||||
'ShowTabView': False,
|
||||
'ShowToolbar': False,
|
||||
'ShowPathbar': False,
|
||||
'ShowSidebar': False
|
||||
}
|
||||
|
||||
window_x = os.environ.get('windowX')
|
||||
if window_x:
|
||||
window_y = os.environ['windowY']
|
||||
bwsp['WindowBounds'] = '{{%s, %s}, {%s, %s}}' % (window_x,
|
||||
window_y,
|
||||
os.environ['windowWidth'],
|
||||
os.environ['windowHeight'])
|
||||
|
||||
arrange_options = {
|
||||
'name': 'name',
|
||||
'date-modified': 'dateModified',
|
||||
'date-created': 'dateCreated',
|
||||
'date-added': 'dateAdded',
|
||||
'date-last-opened': 'dateLastOpened',
|
||||
'size': 'size',
|
||||
'kind': 'kind',
|
||||
'label': 'label',
|
||||
}
|
||||
|
||||
icvp = {
|
||||
'viewOptionsVersion': 1,
|
||||
'backgroundType': 0,
|
||||
'backgroundColorRed': 1.0,
|
||||
'backgroundColorGreen': 1.0,
|
||||
'backgroundColorBlue': 1.0,
|
||||
'gridOffsetX': float(options['grid_offset'][0]),
|
||||
'gridOffsetY': float(options['grid_offset'][1]),
|
||||
'gridSpacing': float(options['grid_spacing']),
|
||||
'arrangeBy': str(arrange_options.get(options['arrange_by'], 'none')),
|
||||
'showIconPreview': options['show_icon_preview'] == True,
|
||||
'showItemInfo': False,
|
||||
'labelOnBottom': True,
|
||||
'textSize': float(options['text_size']),
|
||||
'iconSize': float(options['icon_size']),
|
||||
'scrollPositionX': float(options['scroll_position'][0]),
|
||||
'scrollPositionY': float(options['scroll_position'][1])
|
||||
}
|
||||
|
||||
columns = {
|
||||
'name': 'name',
|
||||
'date-modified': 'dateModified',
|
||||
'date-created': 'dateCreated',
|
||||
'date-added': 'dateAdded',
|
||||
'date-last-opened': 'dateLastOpened',
|
||||
'size': 'size',
|
||||
'kind': 'kind',
|
||||
'label': 'label',
|
||||
'version': 'version',
|
||||
'comments': 'comments'
|
||||
}
|
||||
|
||||
default_widths = {
|
||||
'name': 300,
|
||||
'date-modified': 181,
|
||||
'date-created': 181,
|
||||
'date-added': 181,
|
||||
'date-last-opened': 181,
|
||||
'size': 97,
|
||||
'kind': 115,
|
||||
'label': 100,
|
||||
'version': 75,
|
||||
'comments': 300,
|
||||
}
|
||||
|
||||
default_sort_directions = {
|
||||
'name': 'ascending',
|
||||
'date-modified': 'descending',
|
||||
'date-created': 'descending',
|
||||
'date-added': 'descending',
|
||||
'date-last-opened': 'descending',
|
||||
'size': 'descending',
|
||||
'kind': 'ascending',
|
||||
'label': 'ascending',
|
||||
'version': 'ascending',
|
||||
'comments': 'ascending',
|
||||
}
|
||||
|
||||
lsvp = {
|
||||
'viewOptionsVersion': 1,
|
||||
'sortColumn': columns.get(options['list_sort_by'], 'name'),
|
||||
'textSize': float(options['list_text_size']),
|
||||
'iconSize': float(options['list_icon_size']),
|
||||
'showIconPreview': options['show_icon_preview'],
|
||||
'scrollPositionX': options['list_scroll_position'][0],
|
||||
'scrollPositionY': options['list_scroll_position'][1],
|
||||
'useRelativeDates': True,
|
||||
'calculateAllSizes': False,
|
||||
}
|
||||
|
||||
lsvp['columns'] = {}
|
||||
cndx = {}
|
||||
|
||||
for n, column in enumerate(options['list_columns']):
|
||||
cndx[column] = n
|
||||
width = options['list_column_widths'].get(column, default_widths[column])
|
||||
asc = 'ascending' == options['list_column_sort_directions'].get(column, default_sort_directions[column])
|
||||
|
||||
lsvp['columns'][columns[column]] = {
|
||||
'index': n,
|
||||
'width': width,
|
||||
'identifier': columns[column],
|
||||
'visible': True,
|
||||
'ascending': asc
|
||||
}
|
||||
|
||||
n = len(options['list_columns'])
|
||||
for k in iterkeys(columns):
|
||||
if cndx.get(k, None) is None:
|
||||
cndx[k] = n
|
||||
width = default_widths[k]
|
||||
asc = 'ascending' == default_sort_directions[k]
|
||||
|
||||
lsvp['columns'][columns[k]] = {
'index': n,
'width': width,
'identifier': columns[k],
|
||||
'visible': False,
|
||||
'ascending': asc
|
||||
}
|
||||
|
||||
n += 1
|
||||
|
||||
default_view = 'icon-view'
|
||||
views = {
|
||||
'icon-view': b'icnv',
|
||||
'column-view': b'clmv',
|
||||
'list-view': b'Nlsv',
|
||||
'coverflow': b'Flwv'
|
||||
}
|
||||
|
||||
icvl = (b'type', views.get(default_view, b'icnv'))
|
||||
|
||||
include_icon_view_settings = default_view == 'icon-view' \
|
||||
or options['include_icon_view_settings'] not in \
|
||||
('auto', 'no', 0, False, None)
|
||||
include_list_view_settings = default_view in ('list-view', 'coverflow') \
|
||||
or options['include_list_view_settings'] not in \
|
||||
('auto', 'no', 0, False, None)
|
||||
|
||||
try:
|
||||
background_bmk = None
|
||||
|
||||
background_color = os.environ.get('backgroundColor')
|
||||
background_file = os.environ.get('backgroundFile')
|
||||
|
||||
if background_color:
|
||||
c = parseColor(background_color).to_rgb()
|
||||
|
||||
icvp['backgroundType'] = 1
|
||||
icvp['backgroundColorRed'] = float(c.r)
|
||||
icvp['backgroundColorGreen'] = float(c.g)
|
||||
icvp['backgroundColorBlue'] = float(c.b)
|
||||
elif background_file:
|
||||
alias = Alias.for_file(background_file)
|
||||
background_bmk = Bookmark.for_file(background_file)
|
||||
|
||||
icvp['backgroundType'] = 2
|
||||
icvp['backgroundImageAlias'] = biplist.Data(alias.to_bytes())
|
||||
|
||||
image_dsstore = os.path.join(os.environ['volumePath'], '.DS_Store')
|
||||
|
||||
f = "icon_locations = {\n" + os.environ['iconLocations'] + "\n}"
|
||||
exec(f, options, options)
|
||||
|
||||
with DSStore.open(image_dsstore, 'w+') as d:
|
||||
d['.']['vSrn'] = ('long', 1)
|
||||
d['.']['bwsp'] = bwsp
|
||||
if include_icon_view_settings:
|
||||
d['.']['icvp'] = icvp
|
||||
if background_bmk:
|
||||
d['.']['pBBk'] = background_bmk
|
||||
if include_list_view_settings:
|
||||
d['.']['lsvp'] = lsvp
|
||||
d['.']['icvl'] = icvl
|
||||
|
||||
d['.background']['Iloc'] = (2560, 170)
|
||||
d['.DS_Store']['Iloc'] = (2610, 170)
|
||||
d['.fseventsd']['Iloc'] = (2660, 170)
|
||||
d['.Trashes']['Iloc'] = (2710, 170)
|
||||
d['.VolumeIcon.icns']['Iloc'] = (2760, 170)
|
||||
|
||||
for k, v in iteritems(options['icon_locations']):
|
||||
d[k]['Iloc'] = v
|
||||
except:
|
||||
raise
|
||||
|
||||
build_dmg()
|
||||
3
mc_test/node_modules/dmg-builder/vendor/ds_store/__init__.py
generated
vendored
Executable file
@ -0,0 +1,3 @@
from .store import DSStore, DSStoreEntry

__all__ = ['DSStore', 'DSStoreEntry']
|
||||
478
mc_test/node_modules/dmg-builder/vendor/ds_store/buddy.py
generated
vendored
Executable file
@ -0,0 +1,478 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import bisect
|
||||
import struct
|
||||
import binascii
|
||||
|
||||
try:
|
||||
{}.iterkeys
|
||||
iterkeys = lambda x: x.iterkeys()
|
||||
except AttributeError:
|
||||
iterkeys = lambda x: x.keys()
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = str
|
||||
|
||||
class BuddyError(Exception):
|
||||
pass
|
||||
|
||||
class Block(object):
|
||||
def __init__(self, allocator, offset, size):
|
||||
self._allocator = allocator
|
||||
self._offset = offset
|
||||
self._size = size
|
||||
self._value = bytearray(allocator.read(offset, size))
|
||||
self._pos = 0
|
||||
self._dirty = False
|
||||
|
||||
def __len__(self):
|
||||
return self._size
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
if self._dirty:
|
||||
self.flush()
|
||||
|
||||
def flush(self):
|
||||
if self._dirty:
|
||||
self._dirty = False
|
||||
self._allocator.write(self._offset, self._value)
|
||||
|
||||
def invalidate(self):
|
||||
self._dirty = False
|
||||
|
||||
def zero_fill(self):
|
||||
count = self._size - self._pos
zeroes = b'\0' * count
self._value[self._pos:self._size] = zeroes
|
||||
self._dirty = True
|
||||
|
||||
def tell(self):
|
||||
return self._pos
|
||||
|
||||
def seek(self, pos, whence=os.SEEK_SET):
|
||||
if whence == os.SEEK_CUR:
|
||||
pos += self._pos
|
||||
elif whence == os.SEEK_END:
|
||||
pos = self._size - pos
|
||||
|
||||
if pos < 0 or pos > self._size:
|
||||
raise ValueError('Seek out of range in Block instance')
|
||||
|
||||
self._pos = pos
|
||||
|
||||
def read(self, size_or_format):
|
||||
if isinstance(size_or_format, (str, unicode, bytes)):
|
||||
size = struct.calcsize(size_or_format)
|
||||
fmt = size_or_format
|
||||
else:
|
||||
size = size_or_format
|
||||
fmt = None
|
||||
|
||||
if self._size - self._pos < size:
|
||||
raise BuddyError('Unable to read %lu bytes in block' % size)
|
||||
|
||||
data = self._value[self._pos:self._pos + size]
|
||||
self._pos += size
|
||||
|
||||
if fmt is not None:
|
||||
if isinstance(data, bytearray):
|
||||
return struct.unpack_from(fmt, bytes(data))
|
||||
else:
|
||||
return struct.unpack(fmt, data)
|
||||
else:
|
||||
return data
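# Example (illustration only): read() doubles as raw access and a
# struct shortcut -- blk.read(4) returns 4 raw bytes, while
# blk.read('>I') returns a one-element tuple, so a single big-endian
# count is blk.read('>I')[0].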
|
||||
|
||||
def write(self, data_or_format, *args):
|
||||
if len(args):
|
||||
data = struct.pack(data_or_format, *args)
|
||||
else:
|
||||
data = data_or_format
|
||||
|
||||
if self._pos + len(data) > self._size:
|
||||
raise ValueError('Attempt to write past end of Block')
|
||||
|
||||
self._value[self._pos:self._pos + len(data)] = data
|
||||
self._pos += len(data)
|
||||
|
||||
self._dirty = True
|
||||
|
||||
def insert(self, data_or_format, *args):
|
||||
if len(args):
|
||||
data = struct.pack(data_or_format, *args)
|
||||
else:
|
||||
data = data_or_format
|
||||
|
||||
del self._value[-len(data):]
|
||||
self._value[self._pos:self._pos] = data
|
||||
self._pos += len(data)
|
||||
|
||||
self._dirty = True
|
||||
|
||||
def delete(self, size):
|
||||
if self._pos + size > self._size:
|
||||
raise ValueError('Attempt to delete past end of Block')
|
||||
del self._value[self._pos:self._pos + size]
|
||||
self._value += b'\0' * size
|
||||
self._dirty = True
|
||||
|
||||
def __str__(self):
|
||||
return binascii.b2a_hex(self._value)
|
||||
|
||||
class Allocator(object):
|
||||
def __init__(self, the_file):
|
||||
self._file = the_file
|
||||
self._dirty = False
|
||||
|
||||
self._file.seek(0)
|
||||
|
||||
# Read the header
|
||||
magic1, magic2, offset, size, offset2, self._unknown1 \
|
||||
= self.read(-4, '>I4sIII16s')
|
||||
|
||||
if magic2 != b'Bud1' or magic1 != 1:
|
||||
raise BuddyError('Not a buddy file')
|
||||
|
||||
if offset != offset2:
|
||||
raise BuddyError('Root addresses differ')
|
||||
|
||||
self._root = Block(self, offset, size)
|
||||
|
||||
# Read the block offsets
|
||||
count, self._unknown2 = self._root.read('>II')
|
||||
self._offsets = []
|
||||
c = (count + 255) & ~255
|
||||
while c:
|
||||
self._offsets += self._root.read('>256I')
|
||||
c -= 256
|
||||
self._offsets = self._offsets[:count]
|
||||
|
||||
# Read the TOC
|
||||
self._toc = {}
|
||||
count = self._root.read('>I')[0]
|
||||
for n in range(count):
|
||||
nlen = self._root.read('B')[0]
|
||||
name = bytes(self._root.read(nlen))
|
||||
value = self._root.read('>I')[0]
|
||||
self._toc[name] = value
|
||||
|
||||
# Read the free lists
|
||||
self._free = []
|
||||
for n in range(32):
|
||||
count = self._root.read('>I')[0]
|
||||
self._free.append(list(self._root.read('>%uI' % count)))
|
||||
|
||||
@classmethod
|
||||
def open(cls, file_or_name, mode='r+'):
|
||||
if isinstance(file_or_name, (str, unicode)):
|
||||
if not 'b' in mode:
|
||||
mode = mode[:1] + 'b' + mode[1:]
|
||||
f = open(file_or_name, mode)
|
||||
else:
|
||||
f = file_or_name
|
||||
|
||||
if 'w' in mode:
|
||||
# Create an empty file in this case
|
||||
f.truncate()
|
||||
|
||||
# An empty root block needs 1264 bytes:
|
||||
#
|
||||
# 0 4 offset count
|
||||
# 4 4 unknown
|
||||
# 8 4 root block offset (2048)
|
||||
# 12 255 * 4 padding (offsets are in multiples of 256)
|
||||
# 1032 4 toc count (0)
|
||||
# 1036 228 free list
|
||||
# total 1264
|
||||
|
||||
# The free list will contain the following:
|
||||
#
|
||||
# 0 5 * 4 no blocks of width less than 5
|
||||
# 20 6 * 8 1 block each of widths 5 to 10
|
||||
# 68 4 no blocks of width 11 (allocated for the root)
|
||||
# 72 19 * 8 1 block each of widths 12 to 30
|
||||
# 224 4 no blocks of width 31
|
||||
# total 228
|
||||
#
|
||||
# (The reason for this layout is that we allocate 2**5 bytes for
|
||||
# the header, which splits the initial 2GB region into every size
|
||||
# below 2**31, including *two* blocks of size 2**5, one of which
|
||||
# we take. The root block itself then needs a block of size
|
||||
# 2**11. Conveniently, each of these initial blocks will be
|
||||
# located at offset 2**n where n is its width.)
|
||||
|
||||
# Write the header
|
||||
header = struct.pack(b'>I4sIII16s',
|
||||
1, b'Bud1',
|
||||
2048, 1264, 2048,
|
||||
b'\x00\x00\x10\x0c'
|
||||
b'\x00\x00\x00\x87'
|
||||
b'\x00\x00\x20\x0b'
|
||||
b'\x00\x00\x00\x00')
|
||||
f.write(header)
|
||||
f.write(b'\0' * 2016)
|
||||
|
||||
# Write the root block
|
||||
free_list = [struct.pack(b'>5I', 0, 0, 0, 0, 0)]
|
||||
for n in range(5, 11):
|
||||
free_list.append(struct.pack(b'>II', 1, 2**n))
|
||||
free_list.append(struct.pack(b'>I', 0))
|
||||
for n in range(12, 31):
|
||||
free_list.append(struct.pack(b'>II', 1, 2**n))
|
||||
free_list.append(struct.pack(b'>I', 0))
|
||||
|
||||
root = b''.join([struct.pack(b'>III', 1, 0, 2048 | 5),
|
||||
struct.pack(b'>I', 0) * 255,
|
||||
struct.pack(b'>I', 0)] + free_list)
|
||||
f.write(root)
|
||||
|
||||
return Allocator(f)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
self.flush()
|
||||
self._file.close()
|
||||
|
||||
def flush(self):
|
||||
if self._dirty:
|
||||
size = self._root_block_size()
|
||||
self.allocate(size, 0)
|
||||
with self.get_block(0) as rblk:
|
||||
self._write_root_block_into(rblk)
|
||||
|
||||
addr = self._offsets[0]
|
||||
offset = addr & ~0x1f
|
||||
size = 1 << (addr & 0x1f)
|
||||
|
||||
self._file.seek(0, os.SEEK_SET)
|
||||
self._file.write(struct.pack(b'>I4sIII16s',
|
||||
1, b'Bud1',
|
||||
offset, size, offset,
|
||||
self._unknown1))
|
||||
|
||||
self._dirty = False
|
||||
|
||||
self._file.flush()
|
||||
|
||||
def read(self, offset, size_or_format):
|
||||
"""Read data at `offset', or raise an exception. `size_or_format'
|
||||
may either be a byte count, in which case we return raw data,
|
||||
or a format string for `struct.unpack', in which case we
|
||||
work out the size and unpack the data before returning it."""
|
||||
# N.B. There is a fixed offset of four bytes(!)
|
||||
self._file.seek(offset + 4, os.SEEK_SET)
|
||||
|
||||
if isinstance(size_or_format, (str, unicode)):
|
||||
size = struct.calcsize(size_or_format)
|
||||
fmt = size_or_format
|
||||
else:
|
||||
size = size_or_format
|
||||
fmt = None
|
||||
|
||||
ret = self._file.read(size)
|
||||
if len(ret) < size:
|
||||
ret += b'\0' * (size - len(ret))
|
||||
|
||||
if fmt is not None:
|
||||
if isinstance(ret, bytearray):
|
||||
ret = struct.unpack_from(fmt, bytes(ret))
|
||||
else:
|
||||
ret = struct.unpack(fmt, ret)
|
||||
|
||||
return ret
|
||||
|
||||
def write(self, offset, data_or_format, *args):
|
||||
"""Write data at `offset', or raise an exception. `data_or_format'
|
||||
may either be the data to write, or a format string for `struct.pack',
|
||||
in which case we pack the additional arguments and write the
|
||||
resulting data."""
|
||||
# N.B. There is a fixed offset of four bytes(!)
|
||||
self._file.seek(offset + 4, os.SEEK_SET)
|
||||
|
||||
if len(args):
|
||||
data = struct.pack(data_or_format, *args)
|
||||
else:
|
||||
data = data_or_format
|
||||
|
||||
self._file.write(data)
|
||||
|
||||
def get_block(self, block):
|
||||
try:
|
||||
addr = self._offsets[block]
|
||||
except IndexError:
|
||||
return None
|
||||
|
||||
offset = addr & ~0x1f
|
||||
size = 1 << (addr & 0x1f)
|
||||
|
||||
return Block(self, offset, size)
|
||||
|
||||
def _root_block_size(self):
|
||||
"""Return the number of bytes required by the root block."""
|
||||
# Offsets
|
||||
size = 8
|
||||
size += 4 * ((len(self._offsets) + 255) & ~255)
|
||||
|
||||
# TOC
|
||||
size += 4
|
||||
size += sum([5 + len(s) for s in self._toc])
|
||||
|
||||
# Free list
|
||||
size += sum([4 + 4 * len(fl) for fl in self._free])
|
||||
|
||||
return size
|
||||
|
||||

    def _write_root_block_into(self, block):
        # Offsets
        block.write('>II', len(self._offsets), self._unknown2)
        block.write('>%uI' % len(self._offsets), *self._offsets)
        extra = len(self._offsets) & 255
        if extra:
            block.write(b'\0\0\0\0' * (256 - extra))

        # TOC
        keys = list(self._toc.keys())
        keys.sort()

        block.write('>I', len(keys))
        for k in keys:
            block.write('B', len(k))
            block.write(k)
            block.write('>I', self._toc[k])

        # Free list
        for w, f in enumerate(self._free):
            block.write('>I', len(f))
            if len(f):
                block.write('>%uI' % len(f), *f)

    def _buddy(self, offset, width):
        # Return the free list for `width', the buddy of `offset' at that
        # width, and the buddy's index in the free list (or None)
        f = self._free[width]
        b = offset ^ (1 << width)

        try:
            ndx = f.index(b)
        except ValueError:
            ndx = None

        return (f, b, ndx)

    def _release(self, offset, width):
        # Coalesce with the buddy block for as long as possible
        while True:
            f, b, ndx = self._buddy(offset, width)

            if ndx is None:
                break

            offset &= b
            width += 1
            del f[ndx]

        # Add to the list
        bisect.insort(f, offset)

        # Mark as dirty
        self._dirty = True

    def _alloc(self, width):
        # Find the smallest free block at least `width' wide, then split
        # it into buddy halves until a block of the requested width exists
        w = width
        while not self._free[w]:
            w += 1
        while w > width:
            offset = self._free[w].pop(0)
            w -= 1
            self._free[w] = [offset, offset ^ (1 << w)]
        self._dirty = True
        return self._free[width].pop(0)
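
    # Illustrative trace: releasing offset 0x40 at width 5 looks up its
    # buddy 0x60 (0x40 ^ 0x20); if 0x60 is already free, the pair coalesces
    # into a width-6 block at 0x40 and the loop repeats one level up.
    # _alloc is the inverse operation.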

    def allocate(self, bytes, block=None):
        """Allocate or reallocate a block such that it has space for at least
        `bytes' bytes."""
        if block is None:
            # Find the first unused block
            try:
                block = self._offsets.index(0)
            except ValueError:
                block = len(self._offsets)
                self._offsets.append(0)

        # Compute block width
        width = max(bytes.bit_length(), 5)

        addr = self._offsets[block]
        offset = addr & ~0x1f

        if addr:
            blkwidth = addr & 0x1f
            if blkwidth == width:
                return block
            # Release the old block at its *current* width, not the new one
            self._release(offset, blkwidth)
            self._offsets[block] = 0

        offset = self._alloc(width)
        self._offsets[block] = offset | width
        return block
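
    # Sizing sketch (illustrative): allocate(100) computes
    # width = max((100).bit_length(), 5) = 7, i.e. a 128-byte block;
    # requests of 31 bytes or fewer always get the 32-byte minimum.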

    def release(self, block):
        addr = self._offsets[block]

        if addr:
            width = addr & 0x1f
            offset = addr & ~0x1f
            self._release(offset, width)

        if block == len(self._offsets):
            del self._offsets[block]
        else:
            self._offsets[block] = 0

    def __len__(self):
        return len(self._toc)

    def __getitem__(self, key):
        if not isinstance(key, (str, unicode)):
            raise TypeError('Keys must be of string type')
        if not isinstance(key, bytes):
            key = key.encode('latin_1')
        return self._toc[key]

    def __setitem__(self, key, value):
        if not isinstance(key, (str, unicode)):
            raise TypeError('Keys must be of string type')
        if not isinstance(key, bytes):
            key = key.encode('latin_1')
        self._toc[key] = value
        self._dirty = True

    def __delitem__(self, key):
        if not isinstance(key, (str, unicode)):
            raise TypeError('Keys must be of string type')
        if not isinstance(key, bytes):
            key = key.encode('latin_1')
        del self._toc[key]
        self._dirty = True

    def iterkeys(self):
        return iterkeys(self._toc)

    def keys(self):
        return iterkeys(self._toc)

    def __iter__(self):
        return iterkeys(self._toc)

    def __contains__(self, key):
        return key in self._toc
1251
mc_test/node_modules/dmg-builder/vendor/ds_store/store.py
generated
vendored
Executable file
File diff suppressed because it is too large
27
mc_test/node_modules/dmg-builder/vendor/mac_alias/__init__.py
generated
vendored
Executable file
@ -0,0 +1,27 @@
from .alias import *
from .bookmark import *

__all__ = [ 'ALIAS_KIND_FILE', 'ALIAS_KIND_FOLDER',
            'ALIAS_HFS_VOLUME_SIGNATURE',
            'ALIAS_FIXED_DISK', 'ALIAS_NETWORK_DISK', 'ALIAS_400KB_FLOPPY_DISK',
            'ALIAS_800KB_FLOPPY_DISK', 'ALIAS_1_44MB_FLOPPY_DISK',
            'ALIAS_EJECTABLE_DISK',
            'ALIAS_NO_CNID',
            'kBookmarkPath', 'kBookmarkCNIDPath', 'kBookmarkFileProperties',
            'kBookmarkFileName', 'kBookmarkFileID', 'kBookmarkFileCreationDate',
            'kBookmarkTOCPath', 'kBookmarkVolumePath',
            'kBookmarkVolumeURL', 'kBookmarkVolumeName', 'kBookmarkVolumeUUID',
            'kBookmarkVolumeSize', 'kBookmarkVolumeCreationDate',
            'kBookmarkVolumeProperties', 'kBookmarkContainingFolder',
            'kBookmarkUserName', 'kBookmarkUID', 'kBookmarkWasFileReference',
            'kBookmarkCreationOptions', 'kBookmarkURLLengths',
            'kBookmarkSecurityExtension',
            'AppleShareInfo',
            'VolumeInfo',
            'TargetInfo',
            'Alias',
            'Bookmark',
            'Data',
            'URL' ]
662
mc_test/node_modules/dmg-builder/vendor/mac_alias/alias.py
generated
vendored
Executable file
@ -0,0 +1,662 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division

import struct
import datetime
import io
import re
import os
import os.path
import stat
import sys

if sys.platform == 'darwin':
    from . import osx

try:
    long
except NameError:
    long = int

from .utils import *

ALIAS_KIND_FILE = 0
ALIAS_KIND_FOLDER = 1

ALIAS_HFS_VOLUME_SIGNATURE = b'H+'

ALIAS_FILESYSTEM_UDF = 'UDF (CD/DVD)'
ALIAS_FILESYSTEM_FAT32 = 'FAT32'
ALIAS_FILESYSTEM_EXFAT = 'exFAT'
ALIAS_FILESYSTEM_HFSX = 'HFSX'
ALIAS_FILESYSTEM_HFSPLUS = 'HFS+'
ALIAS_FILESYSTEM_FTP = 'FTP'
ALIAS_FILESYSTEM_NTFS = 'NTFS'
ALIAS_FILESYSTEM_UNKNOWN = 'unknown'

ALIAS_FIXED_DISK = 0
ALIAS_NETWORK_DISK = 1
ALIAS_400KB_FLOPPY_DISK = 2
ALIAS_800KB_FLOPPY_DISK = 3
ALIAS_1_44MB_FLOPPY_DISK = 4
ALIAS_EJECTABLE_DISK = 5

ALIAS_NO_CNID = 0xffffffff

ALIAS_FSTYPE_MAP = {
    # Version 2 aliases
    b'HX': ALIAS_FILESYSTEM_HFSX,
    b'H+': ALIAS_FILESYSTEM_HFSPLUS,

    # Version 3 aliases
    b'BDcu': ALIAS_FILESYSTEM_UDF,
    b'BDIS': ALIAS_FILESYSTEM_FAT32,
    b'BDxF': ALIAS_FILESYSTEM_EXFAT,
    b'HX\0\0': ALIAS_FILESYSTEM_HFSX,
    b'H+\0\0': ALIAS_FILESYSTEM_HFSPLUS,
    b'KG\0\0': ALIAS_FILESYSTEM_FTP,
    b'NTcu': ALIAS_FILESYSTEM_NTFS,
}

def encode_utf8(s):
    if isinstance(s, bytes):
        return s
    return s.encode('utf-8')

def decode_utf8(s):
    if isinstance(s, bytes):
        return s.decode('utf-8')
    return s

class AppleShareInfo (object):
    def __init__(self, zone=None, server=None, user=None):
        #: The AppleShare zone
        self.zone = zone
        #: The AFP server
        self.server = server
        #: The username
        self.user = user

    def __repr__(self):
        return 'AppleShareInfo(%r,%r,%r)' % (self.zone, self.server, self.user)

class VolumeInfo (object):
    def __init__(self, name, creation_date, fs_type, disk_type,
                 attribute_flags, fs_id, appleshare_info=None,
                 driver_name=None, posix_path=None, disk_image_alias=None,
                 dialup_info=None, network_mount_info=None):
        #: The name of the volume on which the target resides
        self.name = name

        #: The creation date of the target's volume
        self.creation_date = creation_date

        #: The filesystem type
        #: (for v2 aliases, this is a 2-character code; for v3 aliases, a
        #: 4-character code).
        self.fs_type = fs_type

        #: The type of disk; should be one of
        #:
        #: * ALIAS_FIXED_DISK
        #: * ALIAS_NETWORK_DISK
        #: * ALIAS_400KB_FLOPPY_DISK
        #: * ALIAS_800KB_FLOPPY_DISK
        #: * ALIAS_1_44MB_FLOPPY_DISK
        #: * ALIAS_EJECTABLE_DISK
        self.disk_type = disk_type

        #: Filesystem attribute flags (from HFS volume header)
        self.attribute_flags = attribute_flags

        #: Filesystem identifier
        self.fs_id = fs_id

        #: AppleShare information (for automatic remounting of network shares)
        #: *(optional)*
        self.appleshare_info = appleshare_info

        #: Driver name (*probably* contains a disk driver name on older Macs)
        #: *(optional)*
        self.driver_name = driver_name

        #: POSIX path of the mount point of the target's volume
        #: *(optional)*
        self.posix_path = posix_path

        #: :class:`Alias` object pointing at the disk image on which the
        #: target's volume resides *(optional)*
        self.disk_image_alias = disk_image_alias

        #: Dialup information (for automatic establishment of dialup connections)
        self.dialup_info = dialup_info

        #: Network mount information (for automatic remounting)
        self.network_mount_info = network_mount_info

    @property
    def filesystem_type(self):
        return ALIAS_FSTYPE_MAP.get(self.fs_type, ALIAS_FILESYSTEM_UNKNOWN)

    def __repr__(self):
        args = ['name', 'creation_date', 'fs_type', 'disk_type',
                'attribute_flags', 'fs_id']
        values = []
        for a in args:
            v = getattr(self, a)
            values.append(repr(v))

        kwargs = ['appleshare_info', 'driver_name', 'posix_path',
                  'disk_image_alias', 'dialup_info', 'network_mount_info']
        for a in kwargs:
            v = getattr(self, a)
            if v is not None:
                values.append('%s=%r' % (a, v))
        return 'VolumeInfo(%s)' % ','.join(values)

class TargetInfo (object):
    def __init__(self, kind, filename, folder_cnid, cnid, creation_date,
                 creator_code, type_code, levels_from=-1, levels_to=-1,
                 folder_name=None, cnid_path=None, carbon_path=None,
                 posix_path=None, user_home_prefix_len=None):
        #: Either ALIAS_KIND_FILE or ALIAS_KIND_FOLDER
        self.kind = kind

        #: The filename of the target
        self.filename = filename

        #: The CNID (Catalog Node ID) of the target's containing folder;
        #: CNIDs are similar to but different than traditional UNIX inode
        #: numbers
        self.folder_cnid = folder_cnid

        #: The CNID (Catalog Node ID) of the target
        self.cnid = cnid

        #: The target's *creation* date.
        self.creation_date = creation_date

        #: The target's Mac creator code (a four-character binary string)
        self.creator_code = creator_code

        #: The target's Mac type code (a four-character binary string)
        self.type_code = type_code

        #: The depth of the alias? Always seems to be -1 on OS X.
        self.levels_from = levels_from

        #: The depth of the target? Always seems to be -1 on OS X.
        self.levels_to = levels_to

        #: The (POSIX) name of the target's containing folder. *(optional)*
        self.folder_name = folder_name

        #: The path from the volume root as a sequence of CNIDs. *(optional)*
        self.cnid_path = cnid_path

        #: The Carbon path of the target *(optional)*
        self.carbon_path = carbon_path

        #: The POSIX path of the target relative to the volume root.  Note
        #: that this may or may not have a leading '/' character, but it is
        #: always relative to the containing volume. *(optional)*
        self.posix_path = posix_path

        #: If the path points into a user's home folder, the number of folders
        #: deep that we go before we get to that home folder. *(optional)*
        self.user_home_prefix_len = user_home_prefix_len

    def __repr__(self):
        args = ['kind', 'filename', 'folder_cnid', 'cnid', 'creation_date',
                'creator_code', 'type_code']
        values = []
        for a in args:
            v = getattr(self, a)
            values.append(repr(v))

        if self.levels_from != -1:
            values.append('levels_from=%r' % self.levels_from)
        if self.levels_to != -1:
            values.append('levels_to=%r' % self.levels_to)

        kwargs = ['folder_name', 'cnid_path', 'carbon_path',
                  'posix_path', 'user_home_prefix_len']
        for a in kwargs:
            v = getattr(self, a)
            values.append('%s=%r' % (a, v))

        return 'TargetInfo(%s)' % ','.join(values)

TAG_CARBON_FOLDER_NAME = 0
TAG_CNID_PATH = 1
TAG_CARBON_PATH = 2
TAG_APPLESHARE_ZONE = 3
TAG_APPLESHARE_SERVER_NAME = 4
TAG_APPLESHARE_USERNAME = 5
TAG_DRIVER_NAME = 6
TAG_NETWORK_MOUNT_INFO = 9
TAG_DIALUP_INFO = 10
TAG_UNICODE_FILENAME = 14
TAG_UNICODE_VOLUME_NAME = 15
TAG_HIGH_RES_VOLUME_CREATION_DATE = 16
TAG_HIGH_RES_CREATION_DATE = 17
TAG_POSIX_PATH = 18
TAG_POSIX_PATH_TO_MOUNTPOINT = 19
TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE = 20
TAG_USER_HOME_LENGTH_PREFIX = 21

class Alias (object):
    def __init__(self, appinfo=b'\0\0\0\0', version=2, volume=None,
                 target=None, extra=[]):
        """Construct a new :class:`Alias` object with the specified
        contents."""

        #: Application specific information (four byte byte-string)
        self.appinfo = appinfo

        #: Version (we support versions 2 and 3)
        self.version = version

        #: A :class:`VolumeInfo` object describing the target's volume
        self.volume = volume

        #: A :class:`TargetInfo` object describing the target
        self.target = target

        #: A list of extra `(tag, value)` pairs
        self.extra = list(extra)

    @classmethod
    def _from_fd(cls, b):
        appinfo, recsize, version = struct.unpack(b'>4shh', b.read(8))

        if recsize < 150:
            raise ValueError('Incorrect alias length')

        if version not in (2, 3):
            raise ValueError('Unsupported alias version %u' % version)

        if version == 2:
            kind, volname, voldate, fstype, disktype, \
            folder_cnid, filename, cnid, crdate, creator_code, type_code, \
            levels_from, levels_to, volattrs, volfsid, reserved = \
                struct.unpack(b'>h28pI2shI64pII4s4shhI2s10s', b.read(142))
        else:
            # The fixed part of a v3 record is 50 bytes, as the format
            # string below requires
            kind, voldate_hr, fstype, disktype, folder_cnid, cnid, \
            crdate_hr, volattrs, reserved = \
                struct.unpack(b'>hQ4shIIQI14s', b.read(50))

            volname = b''
            filename = b''
            creator_code = None
            type_code = None
            voldate = voldate_hr / 65536.0
            crdate = crdate_hr / 65536.0

            # v3 records don't carry these fields; default them so the
            # code below can treat both versions uniformly (assumed
            # defaults: the v2 "always -1" depths and an empty fs id)
            levels_from = -1
            levels_to = -1
            volfsid = b''

        voldate = mac_epoch + datetime.timedelta(seconds=voldate)
        crdate = mac_epoch + datetime.timedelta(seconds=crdate)

        alias = Alias()
        alias.appinfo = appinfo

        alias.volume = VolumeInfo(volname.decode().replace('/', ':'),
                                  voldate, fstype, disktype,
                                  volattrs, volfsid)
        alias.target = TargetInfo(kind, filename.decode().replace('/', ':'),
                                  folder_cnid, cnid,
                                  crdate, creator_code, type_code)
        alias.target.levels_from = levels_from
        alias.target.levels_to = levels_to

        tag = struct.unpack(b'>h', b.read(2))[0]

        while tag != -1:
            length = struct.unpack(b'>h', b.read(2))[0]
            value = b.read(length)
            if length & 1:
                b.read(1)

            if tag == TAG_CARBON_FOLDER_NAME:
                alias.target.folder_name = value.decode().replace('/', ':')
            elif tag == TAG_CNID_PATH:
                alias.target.cnid_path = struct.unpack('>%uI' % (length // 4),
                                                       value)
            elif tag == TAG_CARBON_PATH:
                alias.target.carbon_path = value
            elif tag == TAG_APPLESHARE_ZONE:
                if alias.volume.appleshare_info is None:
                    alias.volume.appleshare_info = AppleShareInfo()
                alias.volume.appleshare_info.zone = value
            elif tag == TAG_APPLESHARE_SERVER_NAME:
                if alias.volume.appleshare_info is None:
                    alias.volume.appleshare_info = AppleShareInfo()
                alias.volume.appleshare_info.server = value
            elif tag == TAG_APPLESHARE_USERNAME:
                if alias.volume.appleshare_info is None:
                    alias.volume.appleshare_info = AppleShareInfo()
                alias.volume.appleshare_info.user = value
            elif tag == TAG_DRIVER_NAME:
                alias.volume.driver_name = value
            elif tag == TAG_NETWORK_MOUNT_INFO:
                alias.volume.network_mount_info = value
            elif tag == TAG_DIALUP_INFO:
                alias.volume.dialup_info = value
            elif tag == TAG_UNICODE_FILENAME:
                alias.target.filename = value[2:].decode('utf-16be')
            elif tag == TAG_UNICODE_VOLUME_NAME:
                alias.volume.name = value[2:].decode('utf-16be')
            elif tag == TAG_HIGH_RES_VOLUME_CREATION_DATE:
                seconds = struct.unpack(b'>Q', value)[0] / 65536.0
                alias.volume.creation_date \
                    = mac_epoch + datetime.timedelta(seconds=seconds)
            elif tag == TAG_HIGH_RES_CREATION_DATE:
                seconds = struct.unpack(b'>Q', value)[0] / 65536.0
                alias.target.creation_date \
                    = mac_epoch + datetime.timedelta(seconds=seconds)
            elif tag == TAG_POSIX_PATH:
                alias.target.posix_path = value.decode()
            elif tag == TAG_POSIX_PATH_TO_MOUNTPOINT:
                alias.volume.posix_path = value.decode()
            elif tag == TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE:
                alias.volume.disk_image_alias = Alias.from_bytes(value)
            elif tag == TAG_USER_HOME_LENGTH_PREFIX:
                alias.target.user_home_prefix_len = struct.unpack(b'>h', value)[0]
            else:
                alias.extra.append((tag, value))

            tag = struct.unpack(b'>h', b.read(2))[0]

        return alias

    @classmethod
    def from_bytes(cls, bytes):
        """Construct an :class:`Alias` object given binary Alias data."""
        with io.BytesIO(bytes) as b:
            return cls._from_fd(b)
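
    # Usage sketch (illustrative): given a raw alias record `data', e.g.
    # one embedded in a .DS_Store entry, `Alias.from_bytes(data)' returns
    # an Alias whose target.filename and volume.name are decoded strings.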

    @classmethod
    def for_file(cls, path):
        """Create an :class:`Alias` that points at the specified file."""
        if sys.platform != 'darwin':
            raise Exception('Not implemented (requires special support)')

        path = encode_utf8(path)

        a = Alias()

        # Find the filesystem
        st = osx.statfs(path)
        vol_path = st.f_mntonname

        # Grab its attributes
        attrs = [osx.ATTR_CMN_CRTIME,
                 osx.ATTR_VOL_NAME,
                 0, 0, 0]
        volinfo = osx.getattrlist(vol_path, attrs, 0)

        vol_crtime = volinfo[0]
        vol_name = encode_utf8(volinfo[1])

        # Also grab various attributes of the file
        attrs = [(osx.ATTR_CMN_OBJTYPE
                  | osx.ATTR_CMN_CRTIME
                  | osx.ATTR_CMN_FNDRINFO
                  | osx.ATTR_CMN_FILEID
                  | osx.ATTR_CMN_PARENTID), 0, 0, 0, 0]
        info = osx.getattrlist(path, attrs, osx.FSOPT_NOFOLLOW)

        if info[0] == osx.VDIR:
            kind = ALIAS_KIND_FOLDER
        else:
            kind = ALIAS_KIND_FILE

        cnid = info[3]
        folder_cnid = info[4]

        dirname, filename = os.path.split(path)

        if dirname == b'' or dirname == b'.':
            dirname = os.getcwd()

        foldername = os.path.basename(dirname)

        creation_date = info[1]

        if kind == ALIAS_KIND_FILE:
            creator_code = struct.pack(b'I', info[2].fileInfo.fileCreator)
            type_code = struct.pack(b'I', info[2].fileInfo.fileType)
        else:
            creator_code = b'\0\0\0\0'
            type_code = b'\0\0\0\0'

        a.target = TargetInfo(kind, filename, folder_cnid, cnid, creation_date,
                              creator_code, type_code)
        a.volume = VolumeInfo(vol_name, vol_crtime, b'H+',
                              ALIAS_FIXED_DISK, 0, b'\0\0')

        a.target.folder_name = foldername
        a.volume.posix_path = vol_path

        rel_path = os.path.relpath(path, vol_path)

        # Leave off the initial '/' if vol_path is '/' (no idea why)
        if vol_path == b'/':
            a.target.posix_path = rel_path
        else:
            a.target.posix_path = b'/' + rel_path

        # Construct the Carbon and CNID paths
        carbon_path = []
        cnid_path = []
        head, tail = os.path.split(rel_path)
        if not tail:
            head, tail = os.path.split(head)
        while head or tail:
            if head:
                attrs = [osx.ATTR_CMN_FILEID, 0, 0, 0, 0]
                info = osx.getattrlist(os.path.join(vol_path, head), attrs, 0)
                cnid_path.append(info[0])
            carbon_tail = tail.replace(b':', b'/')
            carbon_path.insert(0, carbon_tail)
            head, tail = os.path.split(head)

        carbon_path = vol_name + b':' + b':\0'.join(carbon_path)

        a.target.carbon_path = carbon_path
        a.target.cnid_path = cnid_path

        return a
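
    # Usage sketch (illustrative; macOS only, since it relies on the
    # bundled `osx' ctypes helpers):
    #
    #     a = Alias.for_file('/Applications/Safari.app')
    #     record = a.to_bytes()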

    def _to_fd(self, b):
        # We'll come back and fix the length when we're done
        pos = b.tell()
        b.write(struct.pack(b'>4shh', self.appinfo, 0, self.version))

        carbon_volname = encode_utf8(self.volume.name).replace(b':', b'/')
        carbon_filename = encode_utf8(self.target.filename).replace(b':', b'/')
        voldate = (self.volume.creation_date - mac_epoch).total_seconds()
        crdate = (self.target.creation_date - mac_epoch).total_seconds()

        if self.version == 2:
            # NOTE: crdate should be in local time, but that's system
            # dependent (so doing so is ridiculous, and nothing could rely
            # on it).
            b.write(struct.pack(b'>h28pI2shI64pII4s4shhI2s10s',
                                self.target.kind,
                                carbon_volname, int(voldate),
                                self.volume.fs_type,
                                self.volume.disk_type,
                                self.target.folder_cnid,
                                carbon_filename,
                                self.target.cnid,
                                int(crdate),
                                self.target.creator_code,
                                self.target.type_code,
                                self.target.levels_from,
                                self.target.levels_to,
                                self.volume.attribute_flags,
                                self.volume.fs_id,
                                b'\0' * 10))
        else:
            b.write(struct.pack(b'>hQ4shIIQI14s',
                                self.target.kind,
                                int(voldate * 65536),
                                self.volume.fs_type,
                                self.volume.disk_type,
                                self.target.folder_cnid,
                                self.target.cnid,
                                int(crdate * 65536),
                                self.volume.attribute_flags,
                                self.volume.fs_id,
                                b'\0' * 14))

        # Excuse the odd order; we're copying Finder
        if self.target.folder_name:
            carbon_foldername = encode_utf8(self.target.folder_name)\
                                .replace(b':', b'/')
            b.write(struct.pack(b'>hh', TAG_CARBON_FOLDER_NAME,
                                len(carbon_foldername)))
            b.write(carbon_foldername)
            if len(carbon_foldername) & 1:
                b.write(b'\0')

        b.write(struct.pack(b'>hhQhhQ',
                            TAG_HIGH_RES_VOLUME_CREATION_DATE,
                            8, int(voldate * 65536),
                            TAG_HIGH_RES_CREATION_DATE,
                            8, int(crdate * 65536)))

        if self.target.cnid_path:
            cnid_path = struct.pack('>%uI' % len(self.target.cnid_path),
                                    *self.target.cnid_path)
            b.write(struct.pack(b'>hh', TAG_CNID_PATH,
                                len(cnid_path)))
            b.write(cnid_path)

        if self.target.carbon_path:
            carbon_path = encode_utf8(self.target.carbon_path)
            b.write(struct.pack(b'>hh', TAG_CARBON_PATH,
                                len(carbon_path)))
            b.write(carbon_path)
            if len(carbon_path) & 1:
                b.write(b'\0')

        if self.volume.appleshare_info:
            ai = self.volume.appleshare_info
            if ai.zone:
                b.write(struct.pack(b'>hh', TAG_APPLESHARE_ZONE,
                                    len(ai.zone)))
                b.write(ai.zone)
                if len(ai.zone) & 1:
                    b.write(b'\0')
            if ai.server:
                b.write(struct.pack(b'>hh', TAG_APPLESHARE_SERVER_NAME,
                                    len(ai.server)))
                b.write(ai.server)
                if len(ai.server) & 1:
                    b.write(b'\0')
            # AppleShareInfo stores the username in its `user' attribute
            if ai.user:
                b.write(struct.pack(b'>hh', TAG_APPLESHARE_USERNAME,
                                    len(ai.user)))
                b.write(ai.user)
                if len(ai.user) & 1:
                    b.write(b'\0')

        if self.volume.driver_name:
            driver_name = encode_utf8(self.volume.driver_name)
            b.write(struct.pack(b'>hh', TAG_DRIVER_NAME,
                                len(driver_name)))
            b.write(driver_name)
            if len(driver_name) & 1:
                b.write(b'\0')

        if self.volume.network_mount_info:
            b.write(struct.pack(b'>hh', TAG_NETWORK_MOUNT_INFO,
                                len(self.volume.network_mount_info)))
            b.write(self.volume.network_mount_info)
            if len(self.volume.network_mount_info) & 1:
                b.write(b'\0')

        if self.volume.dialup_info:
            b.write(struct.pack(b'>hh', TAG_DIALUP_INFO,
                                len(self.volume.dialup_info)))
            b.write(self.volume.dialup_info)
            if len(self.volume.dialup_info) & 1:
                b.write(b'\0')

        utf16 = decode_utf8(self.target.filename)\
                .replace(':', '/').encode('utf-16-be')
        b.write(struct.pack(b'>hhh', TAG_UNICODE_FILENAME,
                            len(utf16) + 2,
                            len(utf16) // 2))
        b.write(utf16)

        utf16 = decode_utf8(self.volume.name)\
                .replace(':', '/').encode('utf-16-be')
        b.write(struct.pack(b'>hhh', TAG_UNICODE_VOLUME_NAME,
                            len(utf16) + 2,
                            len(utf16) // 2))
        b.write(utf16)

        if self.target.posix_path:
            posix_path = encode_utf8(self.target.posix_path)
            b.write(struct.pack(b'>hh', TAG_POSIX_PATH,
                                len(posix_path)))
            b.write(posix_path)
            if len(posix_path) & 1:
                b.write(b'\0')

        if self.volume.posix_path:
            posix_path = encode_utf8(self.volume.posix_path)
            b.write(struct.pack(b'>hh', TAG_POSIX_PATH_TO_MOUNTPOINT,
                                len(posix_path)))
            b.write(posix_path)
            if len(posix_path) & 1:
                b.write(b'\0')

        if self.volume.disk_image_alias:
            d = self.volume.disk_image_alias.to_bytes()
            b.write(struct.pack(b'>hh', TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE,
                                len(d)))
            b.write(d)
            if len(d) & 1:
                b.write(b'\0')

        if self.target.user_home_prefix_len is not None:
            b.write(struct.pack(b'>hhh', TAG_USER_HOME_LENGTH_PREFIX,
                                2, self.target.user_home_prefix_len))

        for t, v in self.extra:
            b.write(struct.pack(b'>hh', t, len(v)))
            b.write(v)
            if len(v) & 1:
                b.write(b'\0')

        b.write(struct.pack(b'>hh', -1, 0))

        blen = b.tell() - pos
        b.seek(pos + 4, os.SEEK_SET)
        b.write(struct.pack(b'>h', blen))

    def to_bytes(self):
        """Returns the binary representation for this :class:`Alias`."""
        with io.BytesIO() as b:
            self._to_fd(b)
            return b.getvalue()
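
    # Round-trip sketch (illustrative):
    # `Alias.from_bytes(record).to_bytes()' re-encodes the same structure;
    # note that the tag section always ends with the (-1, 0) terminator
    # written in _to_fd above.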

    def __str__(self):
        return '<Alias target=%s>' % self.target.filename

    def __repr__(self):
        values = []
        if self.appinfo != b'\0\0\0\0':
            values.append('appinfo=%r' % self.appinfo)
        if self.version != 2:
            values.append('version=%r' % self.version)
        if self.volume is not None:
            values.append('volume=%r' % self.volume)
        if self.target is not None:
            values.append('target=%r' % self.target)
        if self.extra:
            values.append('extra=%r' % self.extra)
        return 'Alias(%s)' % ','.join(values)
672
mc_test/node_modules/dmg-builder/vendor/mac_alias/bookmark.py
generated
vendored
Executable file
@ -0,0 +1,672 @@
# -*- coding: utf-8 -*-
#
# This file implements the Apple "bookmark" format, which is the replacement
# for the old-fashioned alias format.  The details of this format were
# reverse engineered; some things are still not entirely clear.
#
from __future__ import unicode_literals, print_function

import struct
import uuid
import datetime
import os
import sys
import pprint

try:
    from urlparse import urljoin
except ImportError:
    from urllib.parse import urljoin

if sys.platform == 'darwin':
    from . import osx

def iteritems(x):
    return x.iteritems()

try:
    unicode
except NameError:
    unicode = str
    long = int
    xrange = range
    def iteritems(x):
        return x.items()

from .utils import *

BMK_DATA_TYPE_MASK = 0xffffff00
BMK_DATA_SUBTYPE_MASK = 0x000000ff

BMK_STRING = 0x0100
BMK_DATA = 0x0200
BMK_NUMBER = 0x0300
BMK_DATE = 0x0400
BMK_BOOLEAN = 0x0500
BMK_ARRAY = 0x0600
BMK_DICT = 0x0700
BMK_UUID = 0x0800
BMK_URL = 0x0900
BMK_NULL = 0x0a00

BMK_ST_ZERO = 0x0000
BMK_ST_ONE = 0x0001

BMK_BOOLEAN_ST_FALSE = 0x0000
BMK_BOOLEAN_ST_TRUE = 0x0001

# Subtypes for BMK_NUMBER are really CFNumberType values
kCFNumberSInt8Type = 1
kCFNumberSInt16Type = 2
kCFNumberSInt32Type = 3
kCFNumberSInt64Type = 4
kCFNumberFloat32Type = 5
kCFNumberFloat64Type = 6
kCFNumberCharType = 7
kCFNumberShortType = 8
kCFNumberIntType = 9
kCFNumberLongType = 10
kCFNumberLongLongType = 11
kCFNumberFloatType = 12
kCFNumberDoubleType = 13
kCFNumberCFIndexType = 14
kCFNumberNSIntegerType = 15
kCFNumberCGFloatType = 16

# Resource property flags (from CFURLPriv.h)
kCFURLResourceIsRegularFile = 0x00000001
kCFURLResourceIsDirectory = 0x00000002
kCFURLResourceIsSymbolicLink = 0x00000004
kCFURLResourceIsVolume = 0x00000008
kCFURLResourceIsPackage = 0x00000010
kCFURLResourceIsSystemImmutable = 0x00000020
kCFURLResourceIsUserImmutable = 0x00000040
kCFURLResourceIsHidden = 0x00000080
kCFURLResourceHasHiddenExtension = 0x00000100
kCFURLResourceIsApplication = 0x00000200
kCFURLResourceIsCompressed = 0x00000400
kCFURLResourceIsSystemCompressed = 0x00000400
kCFURLCanSetHiddenExtension = 0x00000800
kCFURLResourceIsReadable = 0x00001000
kCFURLResourceIsWriteable = 0x00002000
kCFURLResourceIsExecutable = 0x00004000
kCFURLIsAliasFile = 0x00008000
kCFURLIsMountTrigger = 0x00010000

# Volume property flags (from CFURLPriv.h)
kCFURLVolumeIsLocal = 0x1 #
kCFURLVolumeIsAutomount = 0x2 #
kCFURLVolumeDontBrowse = 0x4 #
kCFURLVolumeIsReadOnly = 0x8 #
kCFURLVolumeIsQuarantined = 0x10
kCFURLVolumeIsEjectable = 0x20 #
kCFURLVolumeIsRemovable = 0x40 #
kCFURLVolumeIsInternal = 0x80 #
kCFURLVolumeIsExternal = 0x100 #
kCFURLVolumeIsDiskImage = 0x200 #
kCFURLVolumeIsFileVault = 0x400
kCFURLVolumeIsLocaliDiskMirror = 0x800
kCFURLVolumeIsiPod = 0x1000 #
kCFURLVolumeIsiDisk = 0x2000
kCFURLVolumeIsCD = 0x4000
kCFURLVolumeIsDVD = 0x8000
kCFURLVolumeIsDeviceFileSystem = 0x10000
kCFURLVolumeSupportsPersistentIDs = 0x100000000
kCFURLVolumeSupportsSearchFS = 0x200000000
kCFURLVolumeSupportsExchange = 0x400000000
# reserved                      0x800000000
kCFURLVolumeSupportsSymbolicLinks = 0x1000000000
kCFURLVolumeSupportsDenyModes = 0x2000000000
kCFURLVolumeSupportsCopyFile = 0x4000000000
kCFURLVolumeSupportsReadDirAttr = 0x8000000000
kCFURLVolumeSupportsJournaling = 0x10000000000
kCFURLVolumeSupportsRename = 0x20000000000
kCFURLVolumeSupportsFastStatFS = 0x40000000000
kCFURLVolumeSupportsCaseSensitiveNames = 0x80000000000
kCFURLVolumeSupportsCasePreservedNames = 0x100000000000
kCFURLVolumeSupportsFLock = 0x200000000000
kCFURLVolumeHasNoRootDirectoryTimes = 0x400000000000
kCFURLVolumeSupportsExtendedSecurity = 0x800000000000
kCFURLVolumeSupports2TBFileSize = 0x1000000000000
kCFURLVolumeSupportsHardLinks = 0x2000000000000
kCFURLVolumeSupportsMandatoryByteRangeLocks = 0x4000000000000
kCFURLVolumeSupportsPathFromID = 0x8000000000000
# reserved                      0x10000000000000
kCFURLVolumeIsJournaling = 0x20000000000000
kCFURLVolumeSupportsSparseFiles = 0x40000000000000
kCFURLVolumeSupportsZeroRuns = 0x80000000000000
kCFURLVolumeSupportsVolumeSizes = 0x100000000000000
kCFURLVolumeSupportsRemoteEvents = 0x200000000000000
kCFURLVolumeSupportsHiddenFiles = 0x400000000000000
kCFURLVolumeSupportsDecmpFSCompression = 0x800000000000000
kCFURLVolumeHas64BitObjectIDs = 0x1000000000000000
kCFURLVolumePropertyFlagsAll = 0xffffffffffffffff

BMK_URL_ST_ABSOLUTE = 0x0001
BMK_URL_ST_RELATIVE = 0x0002

# Bookmark keys
kBookmarkURL = 0x1003   # A URL
kBookmarkPath = 0x1004   # Array of path components
kBookmarkCNIDPath = 0x1005   # Array of CNIDs
kBookmarkFileProperties = 0x1010   # (CFURL rp flags,
                                   #  CFURL rp flags asked for,
                                   #  8 bytes NULL)
kBookmarkFileName = 0x1020
kBookmarkFileID = 0x1030
kBookmarkFileCreationDate = 0x1040
# = 0x1054   # ?
# = 0x1055   # ?
# = 0x1056   # ?
# = 0x1101   # ?
# = 0x1102   # ?
kBookmarkTOCPath = 0x2000   # A list of (TOC id, ?) pairs
kBookmarkVolumePath = 0x2002
kBookmarkVolumeURL = 0x2005
kBookmarkVolumeName = 0x2010
kBookmarkVolumeUUID = 0x2011   # Stored (perversely) as a string
kBookmarkVolumeSize = 0x2012
kBookmarkVolumeCreationDate = 0x2013
kBookmarkVolumeProperties = 0x2020   # (CFURL vp flags,
                                     #  CFURL vp flags asked for,
                                     #  8 bytes NULL)
kBookmarkVolumeIsRoot = 0x2030   # True if volume is FS root
kBookmarkVolumeBookmark = 0x2040   # Embedded bookmark for disk image (TOC id)
kBookmarkVolumeMountPoint = 0x2050   # A URL
# = 0x2070
kBookmarkContainingFolder = 0xc001   # Index of containing folder in path
kBookmarkUserName = 0xc011   # User that created bookmark
kBookmarkUID = 0xc012   # UID that created bookmark
kBookmarkWasFileReference = 0xd001   # True if the URL was a file reference
kBookmarkCreationOptions = 0xd010
kBookmarkURLLengths = 0xe003   # See below
kBookmarkDisplayName = 0xf017
kBookmarkIconData = 0xf020
kBookmarkIconRef = 0xf021
kBookmarkTypeBindingData = 0xf022
kBookmarkCreationTime = 0xf030
kBookmarkSandboxRwExtension = 0xf080
kBookmarkSandboxRoExtension = 0xf081
kBookmarkAliasData = 0xfe00

# Alias for backwards compatibility
kBookmarkSecurityExtension = kBookmarkSandboxRwExtension

# kBookmarkURLLengths is an array that is set if the URL encoded by the
# bookmark had a base URL; in that case, each entry is the length of the
# base URL in question.  Thus a URL
#
#     file:///foo/bar/baz blam/blat.html
#
# will result in [3, 2], while the URL
#
#     file:///foo bar/baz blam blat.html
#
# would result in [1, 2, 1, 1]
|
||||
def __init__(self, bytedata=None):
|
||||
#: The bytes, stored as a byte string
|
||||
self.bytes = bytes(bytedata)
|
||||
|
||||
def __repr__(self):
|
||||
return 'Data(%r)' % self.bytes
|
||||
|
||||
class URL (object):
|
||||
def __init__(self, base, rel=None):
|
||||
if rel is not None:
|
||||
#: The base URL, if any (a :class:`URL`)
|
||||
self.base = base
|
||||
#: The rest of the URL (a string)
|
||||
self.relative = rel
|
||||
else:
|
||||
self.base = None
|
||||
self.relative = base
|
||||
|
||||
@property
|
||||
def absolute(self):
|
||||
"""Return an absolute URL."""
|
||||
if self.base is None:
|
||||
return self.relative
|
||||
else:
|
||||
base_abs = self.base.absolute
|
||||
return urljoin(self.base.absolute, self.relative)
|
||||
|
||||
def __repr__(self):
|
||||
return 'URL(%r)' % self.absolute
|
||||
|
||||

class Bookmark (object):
    def __init__(self, tocs=None):
        if tocs is None:
            #: The TOCs for this Bookmark
            self.tocs = []
        else:
            self.tocs = tocs

    @classmethod
    def _get_item(cls, data, hdrsize, offset):
        offset += hdrsize
        if offset > len(data) - 8:
            raise ValueError('Offset out of range')

        length, typecode = struct.unpack(b'<II', data[offset:offset+8])

        if len(data) - offset < 8 + length:
            raise ValueError('Data item truncated')

        databytes = data[offset+8:offset+8+length]

        dsubtype = typecode & BMK_DATA_SUBTYPE_MASK
        dtype = typecode & BMK_DATA_TYPE_MASK

        if dtype == BMK_STRING:
            return databytes.decode('utf-8')
        elif dtype == BMK_DATA:
            return Data(databytes)
        elif dtype == BMK_NUMBER:
            if dsubtype == kCFNumberSInt8Type:
                # Slice rather than index so this works on both Python 2
                # (str) and Python 3 (bytes)
                return ord(databytes[0:1])
            elif dsubtype == kCFNumberSInt16Type:
                return struct.unpack(b'<h', databytes)[0]
            elif dsubtype == kCFNumberSInt32Type:
                return struct.unpack(b'<i', databytes)[0]
            elif dsubtype == kCFNumberSInt64Type:
                return struct.unpack(b'<q', databytes)[0]
            elif dsubtype == kCFNumberFloat32Type:
                return struct.unpack(b'<f', databytes)[0]
            elif dsubtype == kCFNumberFloat64Type:
                return struct.unpack(b'<d', databytes)[0]
        elif dtype == BMK_DATE:
            # Yes, dates really are stored as *BIG-endian* doubles; everything
            # else is little-endian
            secs = datetime.timedelta(seconds=struct.unpack(b'>d', databytes)[0])
            return osx_epoch + secs
        elif dtype == BMK_BOOLEAN:
            if dsubtype == BMK_BOOLEAN_ST_TRUE:
                return True
            elif dsubtype == BMK_BOOLEAN_ST_FALSE:
                return False
        elif dtype == BMK_UUID:
            return uuid.UUID(bytes=databytes)
        elif dtype == BMK_URL:
            if dsubtype == BMK_URL_ST_ABSOLUTE:
                return URL(databytes.decode('utf-8'))
            elif dsubtype == BMK_URL_ST_RELATIVE:
                baseoff, reloff = struct.unpack(b'<II', databytes)
                base = cls._get_item(data, hdrsize, baseoff)
                rel = cls._get_item(data, hdrsize, reloff)
                return URL(base, rel)
        elif dtype == BMK_ARRAY:
            result = []
            for aoff in xrange(offset+8, offset+8+length, 4):
                eltoff, = struct.unpack(b'<I', data[aoff:aoff+4])
                result.append(cls._get_item(data, hdrsize, eltoff))
            return result
        elif dtype == BMK_DICT:
            result = {}
            for eoff in xrange(offset+8, offset+8+length, 8):
                keyoff, valoff = struct.unpack(b'<II', data[eoff:eoff+8])
                key = cls._get_item(data, hdrsize, keyoff)
                val = cls._get_item(data, hdrsize, valoff)
                result[key] = val
            return result
        elif dtype == BMK_NULL:
            return None

        print('Unknown data type %08x' % typecode)
        return (typecode, databytes)

    @classmethod
    def from_bytes(cls, data):
        """Create a :class:`Bookmark` given byte data."""

        if len(data) < 16:
            raise ValueError('Not a bookmark file (too short)')

        if isinstance(data, bytearray):
            data = bytes(data)

        magic, size, dummy, hdrsize = struct.unpack(b'<4sIII', data[0:16])

        if magic not in (b'book', b'alis'):
            raise ValueError('Not a bookmark file (bad magic) %r' % magic)

        if hdrsize < 16:
            raise ValueError('Not a bookmark file (header size too short)')

        if hdrsize > size:
            raise ValueError('Not a bookmark file (header size too large)')

        if size != len(data):
            raise ValueError('Not a bookmark file (truncated)')

        tocoffset, = struct.unpack(b'<I', data[hdrsize:hdrsize+4])

        tocs = []

        while tocoffset != 0:
            tocbase = hdrsize + tocoffset
            if tocoffset > size - hdrsize \
               or size - tocbase < 20:
                raise ValueError('TOC offset out of range')

            tocsize, tocmagic, tocid, nexttoc, toccount \
                = struct.unpack(b'<IIIII',
                                data[tocbase:tocbase+20])

            if tocmagic != 0xfffffffe:
                break

            tocsize += 8

            if size - tocbase < tocsize:
                raise ValueError('TOC truncated')

            if tocsize < 12 * toccount:
                raise ValueError('TOC entries overrun TOC size')

            toc = {}
            for n in xrange(0, toccount):
                ebase = tocbase + 20 + 12 * n
                eid, eoffset, edummy = struct.unpack(b'<III',
                                                     data[ebase:ebase+12])

                if eid & 0x80000000:
                    eid = cls._get_item(data, hdrsize, eid & 0x7fffffff)

                toc[eid] = cls._get_item(data, hdrsize, eoffset)

            tocs.append((tocid, toc))

            tocoffset = nexttoc

        return cls(tocs)
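
    # Usage sketch (illustrative): for raw bookmark data `data',
    # `bm = Bookmark.from_bytes(data)' parses the TOCs, after which
    # `bm.get(kBookmarkPath)' returns the stored path-component array and
    # `bm[kBookmarkVolumeUUID]' the volume UUID string.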

    def __getitem__(self, key):
        for tid, toc in self.tocs:
            if key in toc:
                return toc[key]
        raise KeyError('Key not found')

    def __setitem__(self, key, value):
        if len(self.tocs) == 0:
            self.tocs = [(1, {})]
        self.tocs[0][1][key] = value

    def get(self, key, default=None):
        """Lookup the value for a given key, returning a default if not
        present."""
        for tid, toc in self.tocs:
            if key in toc:
                return toc[key]
        return default

    @classmethod
    def _encode_item(cls, item, offset):
        if item is True:
            result = struct.pack(b'<II', 0, BMK_BOOLEAN | BMK_BOOLEAN_ST_TRUE)
        elif item is False:
            result = struct.pack(b'<II', 0, BMK_BOOLEAN | BMK_BOOLEAN_ST_FALSE)
        elif isinstance(item, unicode):
            encoded = item.encode('utf-8')
            result = (struct.pack(b'<II', len(encoded), BMK_STRING | BMK_ST_ONE)
                      + encoded)
        elif isinstance(item, bytes):
            result = (struct.pack(b'<II', len(item), BMK_STRING | BMK_ST_ONE)
                      + item)
        elif isinstance(item, Data):
            result = (struct.pack(b'<II', len(item.bytes),
                                  BMK_DATA | BMK_ST_ONE)
                      + bytes(item.bytes))
        elif isinstance(item, bytearray):
            result = (struct.pack(b'<II', len(item),
                                  BMK_DATA | BMK_ST_ONE)
                      + bytes(item))
        elif isinstance(item, int) or isinstance(item, long):
            if item > -0x80000000 and item < 0x7fffffff:
                result = struct.pack(b'<IIi', 4,
                                     BMK_NUMBER | kCFNumberSInt32Type, item)
            else:
                result = struct.pack(b'<IIq', 8,
                                     BMK_NUMBER | kCFNumberSInt64Type, item)
        elif isinstance(item, float):
            result = struct.pack(b'<IId', 8,
                                 BMK_NUMBER | kCFNumberFloat64Type, item)
        elif isinstance(item, datetime.datetime):
            secs = item - osx_epoch
            result = struct.pack(b'<II', 8, BMK_DATE | BMK_ST_ZERO) \
                     + struct.pack(b'>d', float(secs.total_seconds()))
        elif isinstance(item, uuid.UUID):
            result = struct.pack(b'<II', 16, BMK_UUID | BMK_ST_ONE) \
                     + item.bytes
        elif isinstance(item, URL):
            if item.base:
                baseoff = offset + 16
                reloff, baseenc = cls._encode_item(item.base, baseoff)
                xoffset, relenc = cls._encode_item(item.relative, reloff)
                result = b''.join([
                    struct.pack(b'<IIII', 8, BMK_URL | BMK_URL_ST_RELATIVE,
                                baseoff, reloff),
                    baseenc,
                    relenc])
            else:
                encoded = item.relative.encode('utf-8')
                result = struct.pack(b'<II', len(encoded),
                                     BMK_URL | BMK_URL_ST_ABSOLUTE) + encoded
        elif isinstance(item, list):
            ioffset = offset + 8 + len(item) * 4
            result = [struct.pack(b'<II', len(item) * 4, BMK_ARRAY | BMK_ST_ONE)]
            enc = []
            for elt in item:
                result.append(struct.pack(b'<I', ioffset))
                ioffset, ienc = cls._encode_item(elt, ioffset)
                enc.append(ienc)
            result = b''.join(result + enc)
        elif isinstance(item, dict):
            ioffset = offset + 8 + len(item) * 8
            result = [struct.pack(b'<II', len(item) * 8, BMK_DICT | BMK_ST_ONE)]
            enc = []
            for k, v in iteritems(item):
                result.append(struct.pack(b'<I', ioffset))
                ioffset, ienc = cls._encode_item(k, ioffset)
                enc.append(ienc)
                result.append(struct.pack(b'<I', ioffset))
                ioffset, ienc = cls._encode_item(v, ioffset)
                enc.append(ienc)
            result = b''.join(result + enc)
        elif item is None:
            result = struct.pack(b'<II', 0, BMK_NULL | BMK_ST_ONE)
        else:
            raise ValueError('Unknown item type when encoding: %s' % item)

        offset += len(result)

        # Pad to a multiple of 4 bytes
        if offset & 3:
            extra = 4 - (offset & 3)
            result += b'\0' * extra
            offset += extra

        return (offset, result)
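
    # Encoding sketch (illustrative): `_encode_item(7, offset)' packs a
    # 12-byte record -- a 4-byte length, a 4-byte type word
    # (BMK_NUMBER | kCFNumberSInt32Type), then the 32-bit value -- and pads
    # the running offset up to the next multiple of four where necessary.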

    def to_bytes(self):
        """Convert this :class:`Bookmark` to a byte representation."""

        result = []
        tocs = []
        offset = 4  # For the offset to the first TOC

        # Generate the data and build the TOCs
        for tid, toc in self.tocs:
            entries = []

            for k, v in iteritems(toc):
                if isinstance(k, (str, unicode)):
                    noffset = offset
                    voffset, enc = self._encode_item(k, offset)
                    result.append(enc)
                    offset, enc = self._encode_item(v, voffset)
                    result.append(enc)
                    entries.append((noffset | 0x80000000, voffset))
                else:
                    entries.append((k, offset))
                    offset, enc = self._encode_item(v, offset)
                    result.append(enc)

            # TOC entries must be sorted - CoreServicesInternal does a
            # binary search to find data
            entries.sort()

            tocs.append((tid, b''.join([struct.pack(b'<III', k, o, 0)
                                        for k, o in entries])))

        first_toc_offset = offset

        # Now generate the TOC headers
        for ndx, toc in enumerate(tocs):
            tid, data = toc
            if ndx == len(tocs) - 1:
                next_offset = 0
            else:
                next_offset = offset + 20 + len(data)

            result.append(struct.pack(b'<IIIII', len(data) - 8,
                                      0xfffffffe,
                                      tid,
                                      next_offset,
                                      len(data) // 12))
            result.append(data)

            offset += 20 + len(data)

        # Finally, add the header (and the first TOC offset, which isn't part
        # of the header, but goes just after it)
        header = struct.pack(b'<4sIIIQQQQI', b'book',
                             offset + 48,
                             0x10040000,
                             48,
                             0, 0, 0, 0, first_toc_offset)

        result.insert(0, header)

        return b''.join(result)

    @classmethod
    def for_file(cls, path):
        """Construct a :class:`Bookmark` for a given file."""

        # Find the filesystem
        st = osx.statfs(path)
        vol_path = st.f_mntonname.decode('utf-8')

        # Grab its attributes
        attrs = [osx.ATTR_CMN_CRTIME,
                 osx.ATTR_VOL_SIZE
                 | osx.ATTR_VOL_NAME
                 | osx.ATTR_VOL_UUID,
                 0, 0, 0]
        volinfo = osx.getattrlist(vol_path, attrs, 0)

        vol_crtime = volinfo[0]
        vol_size = volinfo[1]
        vol_name = volinfo[2]
        vol_uuid = volinfo[3]

        # Also grab various attributes of the file
        attrs = [(osx.ATTR_CMN_OBJTYPE
                  | osx.ATTR_CMN_CRTIME
                  | osx.ATTR_CMN_FILEID), 0, 0, 0, 0]
        info = osx.getattrlist(path, attrs, osx.FSOPT_NOFOLLOW)

        cnid = info[2]
        crtime = info[1]

        if info[0] == osx.VREG:
            flags = kCFURLResourceIsRegularFile
        elif info[0] == osx.VDIR:
            flags = kCFURLResourceIsDirectory
        elif info[0] == osx.VLNK:
            flags = kCFURLResourceIsSymbolicLink
        else:
            flags = kCFURLResourceIsRegularFile

        dirname, filename = os.path.split(path)

        relcount = 0
        if not os.path.isabs(dirname):
            curdir = os.getcwd()
            head, tail = os.path.split(curdir)
            relcount = 0
            while head and tail:
                relcount += 1
                head, tail = os.path.split(head)
            dirname = os.path.join(curdir, dirname)

        foldername = os.path.basename(dirname)

        rel_path = os.path.relpath(path, vol_path)

        # Build the path arrays
        name_path = []
        cnid_path = []
        head, tail = os.path.split(rel_path)
        if not tail:
            head, tail = os.path.split(head)
        while head or tail:
            if head:
                attrs = [osx.ATTR_CMN_FILEID, 0, 0, 0, 0]
                info = osx.getattrlist(os.path.join(vol_path, head), attrs, 0)
                cnid_path.insert(0, info[0])
                head, tail = os.path.split(head)
                name_path.insert(0, tail)
            else:
                head, tail = os.path.split(head)
        name_path.append(filename)
        cnid_path.append(cnid)

        url_lengths = [relcount, len(name_path) - relcount]

        fileprops = Data(struct.pack(b'<QQQ', flags, 0x0f, 0))
        volprops = Data(struct.pack(b'<QQQ',
                                    0x81 | kCFURLVolumeSupportsPersistentIDs,
                                    0x13ef | kCFURLVolumeSupportsPersistentIDs,
                                    0))

        toc = {
            kBookmarkPath: name_path,
            kBookmarkCNIDPath: cnid_path,
            kBookmarkFileCreationDate: crtime,
            kBookmarkFileProperties: fileprops,
            kBookmarkContainingFolder: len(name_path) - 2,
            kBookmarkVolumePath: vol_path,
            kBookmarkVolumeIsRoot: vol_path == '/',
            kBookmarkVolumeURL: URL('file://' + vol_path),
            kBookmarkVolumeName: vol_name,
            kBookmarkVolumeSize: vol_size,
            kBookmarkVolumeCreationDate: vol_crtime,
            kBookmarkVolumeUUID: str(vol_uuid).upper(),
            kBookmarkVolumeProperties: volprops,
            kBookmarkCreationOptions: 512,
            kBookmarkWasFileReference: True,
            kBookmarkUserName: 'unknown',
            kBookmarkUID: 99,
        }

        if relcount:
            toc[kBookmarkURLLengths] = url_lengths

        return Bookmark([(1, toc)])

    def __repr__(self):
        result = ['Bookmark([']
        for tid, toc in self.tocs:
            result.append('(0x%x, {\n' % tid)
            for k, v in iteritems(toc):
                if isinstance(k, (str, unicode)):
                    kf = repr(k)
                else:
                    kf = '0x%04x' % k
                result.append('  %s: %r\n' % (kf, v))
            result.append('}),\n')
        result.append('])')

        return ''.join(result)
970
mc_test/node_modules/dmg-builder/vendor/mac_alias/osx.py
generated
vendored
Executable file
@ -0,0 +1,970 @@
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from ctypes import *
|
||||
import struct
|
||||
import os
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
from .utils import *
|
||||
|
||||
libc = cdll.LoadLibrary('/usr/lib/libc.dylib')
|
||||
|
||||
# Constants
|
||||
FSOPT_NOFOLLOW = 0x00000001
|
||||
FSOPT_NOINMEMUPDATE = 0x00000002
|
||||
FSOPT_REPORT_FULLSIZE = 0x00000004
|
||||
FSOPT_PACK_INVAL_ATTRS = 0x00000008
|
||||
FSOPT_ATTR_CMN_EXTENDED = 0x00000020
|
||||
FSOPT_RETURN_REALDEV = 0x00000200
|
||||
|
||||
VOL_CAPABILITIES_FORMAT = 0
|
||||
VOL_CAPABILITIES_INTERFACES = 1
|
||||
|
||||
VOL_CAP_FMT_PERSISTENTOBJECTIDS = 0x00000001
|
||||
VOL_CAP_FMT_SYMBOLICLINKS = 0x00000002
|
||||
VOL_CAP_FMT_HARDLINKS = 0x00000004
|
||||
VOL_CAP_FMT_JOURNAL = 0x00000008
|
||||
VOL_CAP_FMT_JOURNAL_ACTIVE = 0x00000010
|
||||
VOL_CAP_FMT_NO_ROOT_TIMES = 0x00000020
|
||||
VOL_CAP_FMT_SPARSE_FILES = 0x00000040
|
||||
VOL_CAP_FMT_ZERO_RUNS = 0x00000080
|
||||
VOL_CAP_FMT_CASE_SENSITIVE = 0x00000100
|
||||
VOL_CAP_FMT_CASE_PRESERVING = 0x00000200
|
||||
VOL_CAP_FMT_FAST_STATFS = 0x00000400
|
||||
VOL_CAP_FMT_2TB_FILESIZE = 0x00000800
|
||||
VOL_CAP_FMT_OPENDENYMODES = 0x00001000
|
||||
VOL_CAP_FMT_HIDDEN_FILES = 0x00002000
|
||||
VOL_CAP_FMT_PATH_FROM_ID = 0x00004000
|
||||
VOL_CAP_FMT_NO_VOLUME_SIZES = 0x00008000
|
||||
VOL_CAP_FMT_DECMPFS_COMPRESSION = 0x00010000
|
||||
VOL_CAP_FMT_64BIT_OBJECT_IDS = 0x00020000
|
||||
|
||||
VOL_CAP_INT_SEARCHFS = 0x00000001
|
||||
VOL_CAP_INT_ATTRLIST = 0x00000002
|
||||
VOL_CAP_INT_NFSEXPORT = 0x00000004
|
||||
VOL_CAP_INT_READDIRATTR = 0x00000008
|
||||
VOL_CAP_INT_EXCHANGEDATA = 0x00000010
|
||||
VOL_CAP_INT_COPYFILE = 0x00000020
|
||||
VOL_CAP_INT_ALLOCATE = 0x00000040
|
||||
VOL_CAP_INT_VOL_RENAME = 0x00000080
|
||||
VOL_CAP_INT_ADVLOCK = 0x00000100
|
||||
VOL_CAP_INT_FLOCK = 0x00000200
|
||||
VOL_CAP_INT_EXTENDED_SECURITY = 0x00000400
|
||||
VOL_CAP_INT_USERACCESS = 0x00000800
|
||||
VOL_CAP_INT_MANLOCK = 0x00001000
|
||||
VOL_CAP_INT_NAMEDSTREAMS = 0x00002000
|
||||
VOL_CAP_INT_EXTENDED_ATTR = 0x00004000
|
||||
VOL_CAP_INT_CLONE = 0x00010000
|
||||
VOL_CAP_INT_SNAPSHOT = 0x00020000
|
||||
VOL_CAP_INT_RENAME_SWAP = 0x00040000
|
||||
VOL_CAP_INT_RENAME_EXCL = 0x00080000
VOL_CAP_INT_RENAME_OPENFAIL = 0x00100000

ATTR_CMN_NAME = 0x00000001
ATTR_CMN_DEVID = 0x00000002
ATTR_CMN_FSID = 0x00000004
ATTR_CMN_OBJTYPE = 0x00000008
ATTR_CMN_OBJTAG = 0x00000010
ATTR_CMN_OBJID = 0x00000020
ATTR_CMN_OBJPERMANENTID = 0x00000040
ATTR_CMN_PAROBJID = 0x00000080
ATTR_CMN_SCRIPT = 0x00000100
ATTR_CMN_CRTIME = 0x00000200
ATTR_CMN_MODTIME = 0x00000400
ATTR_CMN_CHGTIME = 0x00000800
ATTR_CMN_ACCTIME = 0x00001000
ATTR_CMN_BKUPTIME = 0x00002000
ATTR_CMN_FNDRINFO = 0x00004000
ATTR_CMN_OWNERID = 0x00008000
ATTR_CMN_GRPID = 0x00010000
ATTR_CMN_ACCESSMASK = 0x00020000
ATTR_CMN_FLAGS = 0x00040000
ATTR_CMN_GEN_COUNT = 0x00080000
ATTR_CMN_DOCUMENT_ID = 0x00100000
ATTR_CMN_USERACCESS = 0x00200000
ATTR_CMN_EXTENDED_SECURITY = 0x00400000
ATTR_CMN_UUID = 0x00800000
ATTR_CMN_GRPUUID = 0x01000000
ATTR_CMN_FILEID = 0x02000000
ATTR_CMN_PARENTID = 0x04000000
ATTR_CMN_FULLPATH = 0x08000000
ATTR_CMN_ADDEDTIME = 0x10000000
ATTR_CMN_ERROR = 0x20000000
ATTR_CMN_DATA_PROTECT_FLAGS = 0x40000000
ATTR_CMN_RETURNED_ATTRS = 0x80000000
ATTR_CMN_ALL_ATTRS = 0xffffffff

ATTR_CMN_VALIDMASK = 0xffffffff
ATTR_CMN_SETMASK = 0x51c7ff00
ATTR_CMN_VOLSETMASK = 0x00006700

ATTR_VOL_FSTYPE = 0x00000001
ATTR_VOL_SIGNATURE = 0x00000002
ATTR_VOL_SIZE = 0x00000004
ATTR_VOL_SPACEFREE = 0x00000008
ATTR_VOL_SPACEAVAIL = 0x00000010
ATTR_VOL_MINALLOCATION = 0x00000020
ATTR_VOL_ALLOCATIONCLUMP = 0x00000040
ATTR_VOL_IOBLOCKSIZE = 0x00000080
ATTR_VOL_OBJCOUNT = 0x00000100
ATTR_VOL_FILECOUNT = 0x00000200
ATTR_VOL_DIRCOUNT = 0x00000400
ATTR_VOL_MAXOBJCOUNT = 0x00000800
ATTR_VOL_MOUNTPOINT = 0x00001000
ATTR_VOL_NAME = 0x00002000
ATTR_VOL_MOUNTFLAGS = 0x00004000
ATTR_VOL_MOUNTEDDEVICE = 0x00008000
ATTR_VOL_ENCODINGSUSED = 0x00010000
ATTR_VOL_CAPABILITIES = 0x00020000
ATTR_VOL_UUID = 0x00040000
ATTR_VOL_QUOTA_SIZE = 0x10000000
ATTR_VOL_RESERVED_SIZE = 0x20000000
ATTR_VOL_ATTRIBUTES = 0x40000000
ATTR_VOL_INFO = 0x80000000
ATTR_VOL_ALL_ATTRS = 0xf007ffff

ATTR_DIR_LINKCOUNT = 0x00000001
ATTR_DIR_ENTRYCOUNT = 0x00000002
ATTR_DIR_MOUNTSTATUS = 0x00000004
DIR_MNTSTATUS_MNTPOINT = 0x00000001
DIR_MNTSTATUS_TRIGGER = 0x00000002
ATTR_DIR_ALLOCSIZE = 0x00000008
ATTR_DIR_IOBLOCKSIZE = 0x00000010
ATTR_DIR_DATALENGTH = 0x00000020
ATTR_DIR_ALL_ATTRS = 0x0000003f

ATTR_DIR_VALIDMASK = 0x0000003f
ATTR_DIR_SETMASK = 0x00000000

ATTR_FILE_LINKCOUNT = 0x00000001
ATTR_FILE_TOTALSIZE = 0x00000002
ATTR_FILE_ALLOCSIZE = 0x00000004
ATTR_FILE_IOBLOCKSIZE = 0x00000008
ATTR_FILE_DEVTYPE = 0x00000020
ATTR_FILE_FORKCOUNT = 0x00000080
ATTR_FILE_FORKLIST = 0x00000100
ATTR_FILE_DATALENGTH = 0x00000200
ATTR_FILE_DATAALLOCSIZE = 0x00000400
ATTR_FILE_RSRCLENGTH = 0x00001000
ATTR_FILE_RSRCALLOCSIZE = 0x00002000
ATTR_FILE_ALL_ATTRS = 0x000037ff

ATTR_FILE_VALIDMASK = 0x000037ff
ATTR_FILE_SETMASK = 0x00000020

# These are deprecated
ATTR_FORK_TOTALSIZE = 0x00000001
ATTR_FORK_ALLOCSIZE = 0x00000002
ATTR_FORK_ALL_ATTRS = 0x00000003

# These go in the fork attribute field
ATTR_CMNEXT_RELPATH = 0x00000004
ATTR_CMNEXT_PRIVATESIZE = 0x00000008
ATTR_CMNEXT_LINKID = 0x00000010
ATTR_CMNEXT_NOFIRMLINKPATH = 0x00000020
ATTR_CMNEXT_REALDEVID = 0x00000040
ATTR_CMNEXT_REALFSID = 0x00000080
ATTR_CMNEXT_CLONEID = 0x00000100
ATTR_CMNEXT_EXT_FLAGS = 0x00000200
ATTR_CMNEXT_RECURSIVE_GENCOUNT = 0x00000400
ATTR_CMNEXT_ALL_ATTRS = 0x000007fc

ATTR_CMNEXT_VALIDMASK = 0x000007fc
ATTR_CMNEXT_SETMASK = 0x00000000

ATTR_FORK_VALIDMASK = 0x00000003
ATTR_FORK_SETMASK = 0x00000000

# These can't be used
ATTR_CMN_NAMEDATTRCOUNT = 0x00080000
ATTR_CMN_NAMEDATTRLIST = 0x00100000
ATTR_FILE_CLUMPSIZE = 0x00000010
ATTR_FILE_FILETYPE = 0x00000040
ATTR_FILE_DATAEXTENTS = 0x00000800
ATTR_FILE_RSRCEXTENTS = 0x00004000

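# The constants above are bit masks, one group per attrlist field; callers
# OR them together to request several attributes at once.  A minimal sketch
# (illustrative values only; getattrlist() is defined later in this file):
#
#     common = ATTR_CMN_NAME | ATTR_CMN_OBJTYPE | ATTR_CMN_MODTIME
#     getattrlist(b'/', [common, 0, 0, 0, 0], 0)
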
class attrlist(Structure):
    _fields_ = [('bitmapcount', c_ushort),
                ('reserved', c_ushort),
                ('commonattr', c_uint),
                ('volattr', c_uint),
                ('dirattr', c_uint),
                ('fileattr', c_uint),
                ('forkattr', c_uint)]

class attribute_set_t(Structure):
    _fields_ = [('commonattr', c_uint),
                ('volattr', c_uint),
                ('dirattr', c_uint),
                ('fileattr', c_uint),
                ('forkattr', c_uint)]

class fsobj_id_t(Structure):
    _fields_ = [('fid_objno', c_uint),
                ('fid_generation', c_uint)]

class timespec(Structure):
    _fields_ = [('tv_sec', c_long),
                ('tv_nsec', c_long)]

class attrreference_t(Structure):
    _fields_ = [('attr_dataoffset', c_int),
                ('attr_length', c_uint)]

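# Note: variable-length attributes (names, paths, ACLs) are not stored
# inline in the result buffer; getattrlist() returns an attrreference_t
# whose attr_dataoffset is relative to the start of the attrreference_t
# itself.  The decoding code further down relies on this.
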
class fsid_t(Structure):
    _fields_ = [('val', c_uint * 2)]

class guid_t(Structure):
    _fields_ = [('g_guid', c_byte * 16)]

class kauth_ace(Structure):
    _fields_ = [('ace_applicable', guid_t),
                ('ace_flags', c_uint)]

class kauth_acl(Structure):
    _fields_ = [('acl_entrycount', c_uint),
                ('acl_flags', c_uint),
                ('acl_ace', kauth_ace * 128)]

class kauth_filesec(Structure):
    _fields_ = [('fsec_magic', c_uint),
                ('fsec_owner', guid_t),
                ('fsec_group', guid_t),
                ('fsec_acl', kauth_acl)]

class diskextent(Structure):
    _fields_ = [('startblock', c_uint),
                ('blockcount', c_uint)]

OSType = c_uint
UInt16 = c_ushort
SInt16 = c_short
SInt32 = c_int

class Point(Structure):
    _fields_ = [('x', SInt16),
                ('y', SInt16)]

class Rect(Structure):
    _fields_ = [('x', SInt16),
                ('y', SInt16),
                ('w', SInt16),
                ('h', SInt16)]

class FileInfo(Structure):
    _fields_ = [('fileType', OSType),
                ('fileCreator', OSType),
                ('finderFlags', UInt16),
                ('location', Point),
                ('reservedField', UInt16),
                ('reserved1', SInt16 * 4),
                ('extendedFinderFlags', UInt16),
                ('reserved2', SInt16),
                ('putAwayFolderID', SInt32)]

class FolderInfo(Structure):
    _fields_ = [('windowBounds', Rect),
                ('finderFlags', UInt16),
                ('location', Point),
                ('reservedField', UInt16),
                ('scrollPosition', Point),
                ('reserved1', SInt32),
                ('extendedFinderFlags', UInt16),
                ('reserved2', SInt16),
                ('putAwayFolderID', SInt32)]

class FinderInfo(Union):
    _fields_ = [('fileInfo', FileInfo),
                ('folderInfo', FolderInfo)]

extentrecord = diskextent * 8

vol_capabilities_set_t = c_uint * 4

class vol_capabilities_attr_t(Structure):
    _fields_ = [('capabilities', vol_capabilities_set_t),
                ('valid', vol_capabilities_set_t)]

class vol_attributes_attr_t(Structure):
    _fields_ = [('validattr', attribute_set_t),
                ('nativeattr', attribute_set_t)]

dev_t = c_uint

fsobj_type_t = c_uint

VNON = 0
VREG = 1
VDIR = 2
VBLK = 3
VCHR = 4
VLNK = 5
VSOCK = 6
VFIFO = 7
VBAD = 8
VSTR = 9
VCPLX = 10

fsobj_tag_t = c_uint

VT_NON = 0
VT_UFS = 1
VT_NFS = 2
VT_MFS = 3
VT_MSDOSFS = 4
VT_LFS = 5
VT_LOFS = 6
VT_FDESC = 7
VT_PORTAL = 8
VT_NULL = 9
VT_UMAP = 10
VT_KERNFS = 11
VT_PROCFS = 12
VT_AFS = 13
VT_ISOFS = 14
VT_UNION = 15
VT_HFS = 16
VT_ZFS = 17
VT_DEVFS = 18
VT_WEBDAV = 19
VT_UDF = 20
VT_AFP = 21
VT_CDDA = 22
VT_CIFS = 23
VT_OTHER = 24

fsfile_type_t = c_uint
fsvolid_t = c_uint
text_encoding_t = c_uint
uid_t = c_uint
gid_t = c_uint
int32_t = c_int
uint32_t = c_uint
int64_t = c_longlong
uint64_t = c_ulonglong
off_t = c_long
size_t = c_ulong
uuid_t = c_byte * 16

NAME_MAX = 255
PATH_MAX = 1024
FSTYPE_MAX = 16

class struct_statfs(Structure):
    _fields_ = [('f_bsize', uint32_t),
                ('f_iosize', int32_t),
                ('f_blocks', uint64_t),
                ('f_bfree', uint64_t),
                ('f_bavail', uint64_t),
                ('f_files', uint64_t),
                ('f_ffree', uint64_t),
                ('f_fsid', fsid_t),
                ('f_owner', uid_t),
                ('f_type', uint32_t),
                ('f_flags', uint32_t),
                ('f_fssubtype', uint32_t),
                ('f_fstypename', c_char * FSTYPE_MAX),
                ('f_mntonname', c_char * PATH_MAX),
                ('f_mntfromname', c_char * PATH_MAX),
                ('f_flags_ext', uint32_t),
                ('f_reserved', uint32_t * 7)]

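# Each _attr_info entry below is (index into the attrs list, attribute
# mask, worst-case size in bytes); a size of None marks attributes that the
# kernel headers define but this module cannot request.
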
# Calculate the maximum number of bytes required for the attribute buffer
_attr_info = (
    # Common attributes
    (0, ATTR_CMN_RETURNED_ATTRS, sizeof(attribute_set_t)),
    (0, ATTR_CMN_NAME, sizeof(attrreference_t) + NAME_MAX * 3 + 1),
    (0, ATTR_CMN_DEVID, sizeof(dev_t)),
    (0, ATTR_CMN_FSID, sizeof(fsid_t)),
    (0, ATTR_CMN_OBJTYPE, sizeof(fsobj_type_t)),
    (0, ATTR_CMN_OBJTAG, sizeof(fsobj_tag_t)),
    (0, ATTR_CMN_OBJID, sizeof(fsobj_id_t)),
    (0, ATTR_CMN_OBJPERMANENTID, sizeof(fsobj_id_t)),
    (0, ATTR_CMN_PAROBJID, sizeof(fsobj_id_t)),
    (0, ATTR_CMN_SCRIPT, sizeof(text_encoding_t)),
    (0, ATTR_CMN_CRTIME, sizeof(timespec)),
    (0, ATTR_CMN_MODTIME, sizeof(timespec)),
    (0, ATTR_CMN_CHGTIME, sizeof(timespec)),
    (0, ATTR_CMN_ACCTIME, sizeof(timespec)),
    (0, ATTR_CMN_BKUPTIME, sizeof(timespec)),
    (0, ATTR_CMN_FNDRINFO, sizeof(FinderInfo)),
    (0, ATTR_CMN_OWNERID, sizeof(uid_t)),
    (0, ATTR_CMN_GRPID, sizeof(gid_t)),
    (0, ATTR_CMN_ACCESSMASK, sizeof(uint32_t)),
    (0, ATTR_CMN_NAMEDATTRCOUNT, None),
    (0, ATTR_CMN_NAMEDATTRLIST, None),
    (0, ATTR_CMN_FLAGS, sizeof(uint32_t)),
    (0, ATTR_CMN_GEN_COUNT, sizeof(uint32_t)),
    (0, ATTR_CMN_DOCUMENT_ID, sizeof(uint32_t)),
    (0, ATTR_CMN_USERACCESS, sizeof(uint32_t)),
    (0, ATTR_CMN_EXTENDED_SECURITY,
     sizeof(attrreference_t) + sizeof(kauth_filesec)),
    (0, ATTR_CMN_UUID, sizeof(guid_t)),
    (0, ATTR_CMN_GRPUUID, sizeof(guid_t)),
    (0, ATTR_CMN_FILEID, sizeof(uint64_t)),
    (0, ATTR_CMN_PARENTID, sizeof(uint64_t)),
    (0, ATTR_CMN_FULLPATH, sizeof(attrreference_t) + PATH_MAX),
    (0, ATTR_CMN_ADDEDTIME, sizeof(timespec)),
    (0, ATTR_CMN_DATA_PROTECT_FLAGS, sizeof(uint32_t)),

    # Volume attributes
    (1, ATTR_VOL_FSTYPE, sizeof(uint32_t)),
    (1, ATTR_VOL_SIGNATURE, sizeof(uint32_t)),
    (1, ATTR_VOL_SIZE, sizeof(off_t)),
    (1, ATTR_VOL_SPACEFREE, sizeof(off_t)),
    (1, ATTR_VOL_SPACEAVAIL, sizeof(off_t)),
    (1, ATTR_VOL_MINALLOCATION, sizeof(off_t)),
    (1, ATTR_VOL_ALLOCATIONCLUMP, sizeof(off_t)),
    (1, ATTR_VOL_IOBLOCKSIZE, sizeof(uint32_t)),
    (1, ATTR_VOL_OBJCOUNT, sizeof(uint32_t)),
    (1, ATTR_VOL_FILECOUNT, sizeof(uint32_t)),
    (1, ATTR_VOL_DIRCOUNT, sizeof(uint32_t)),
    (1, ATTR_VOL_MAXOBJCOUNT, sizeof(uint32_t)),
    (1, ATTR_VOL_MOUNTPOINT, sizeof(attrreference_t) + PATH_MAX),
    (1, ATTR_VOL_NAME, sizeof(attrreference_t) + NAME_MAX + 1),
    (1, ATTR_VOL_MOUNTFLAGS, sizeof(uint32_t)),
    (1, ATTR_VOL_MOUNTEDDEVICE, sizeof(attrreference_t) + PATH_MAX),
    (1, ATTR_VOL_ENCODINGSUSED, sizeof(c_ulonglong)),
    (1, ATTR_VOL_CAPABILITIES, sizeof(vol_capabilities_attr_t)),
    (1, ATTR_VOL_UUID, sizeof(uuid_t)),
    (1, ATTR_VOL_QUOTA_SIZE, sizeof(off_t)),
    (1, ATTR_VOL_RESERVED_SIZE, sizeof(off_t)),
    (1, ATTR_VOL_ATTRIBUTES, sizeof(vol_attributes_attr_t)),

    # Directory attributes
    (2, ATTR_DIR_LINKCOUNT, sizeof(uint32_t)),
    (2, ATTR_DIR_ENTRYCOUNT, sizeof(uint32_t)),
    (2, ATTR_DIR_MOUNTSTATUS, sizeof(uint32_t)),
    (2, ATTR_DIR_ALLOCSIZE, sizeof(off_t)),
    (2, ATTR_DIR_IOBLOCKSIZE, sizeof(uint32_t)),
    (2, ATTR_DIR_DATALENGTH, sizeof(off_t)),

    # File attributes
    (3, ATTR_FILE_LINKCOUNT, sizeof(uint32_t)),
    (3, ATTR_FILE_TOTALSIZE, sizeof(off_t)),
    (3, ATTR_FILE_ALLOCSIZE, sizeof(off_t)),
    (3, ATTR_FILE_IOBLOCKSIZE, sizeof(uint32_t)),
    (3, ATTR_FILE_CLUMPSIZE, sizeof(uint32_t)),
    (3, ATTR_FILE_DEVTYPE, sizeof(uint32_t)),
    (3, ATTR_FILE_FILETYPE, sizeof(uint32_t)),
    (3, ATTR_FILE_FORKCOUNT, sizeof(uint32_t)),
    (3, ATTR_FILE_FORKLIST, None),
    (3, ATTR_FILE_DATALENGTH, sizeof(off_t)),
    (3, ATTR_FILE_DATAALLOCSIZE, sizeof(off_t)),
    (3, ATTR_FILE_DATAEXTENTS, sizeof(extentrecord)),
    (3, ATTR_FILE_RSRCLENGTH, sizeof(off_t)),
    (3, ATTR_FILE_RSRCALLOCSIZE, sizeof(off_t)),
    (3, ATTR_FILE_RSRCEXTENTS, sizeof(extentrecord)),

    # Fork attributes
    (4, ATTR_FORK_TOTALSIZE, sizeof(off_t)),
    (4, ATTR_FORK_ALLOCSIZE, sizeof(off_t)),

    # Extended common attributes
    (4, ATTR_CMNEXT_RELPATH, sizeof(attrreference_t) + PATH_MAX),
    (4, ATTR_CMNEXT_PRIVATESIZE, sizeof(off_t)),
    (4, ATTR_CMNEXT_LINKID, sizeof(uint64_t)),
    (4, ATTR_CMNEXT_NOFIRMLINKPATH, sizeof(attrreference_t) + PATH_MAX),
    (4, ATTR_CMNEXT_REALDEVID, sizeof(dev_t)),
    (4, ATTR_CMNEXT_REALFSID, sizeof(fsid_t)),
    (4, ATTR_CMNEXT_CLONEID, sizeof(uint64_t)),
    (4, ATTR_CMNEXT_EXT_FLAGS, sizeof(uint64_t)),
)

def _attrbuf_size(attrs):
    size = 4
    for entry in _attr_info:
        if attrs[entry[0]] & entry[1]:
            if entry[2] is None:
                raise ValueError('Unsupported attribute (%u, %x)'
                                 % (entry[0], entry[1]))
            size += entry[2]
    return size

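# For example (a sketch; sizes vary with the platform's type widths):
#
#     _attrbuf_size([ATTR_CMN_OBJTYPE | ATTR_CMN_MODTIME, 0, 0, 0, 0])
#
# reserves 4 bytes for the kernel's leading length field plus
# sizeof(fsobj_type_t) and sizeof(timespec).
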
_getattrlist = libc.getattrlist
_getattrlist.argtypes = [c_char_p, POINTER(attrlist), c_void_p, c_ulong, c_ulong]
_getattrlist.restype = c_int

_fgetattrlist = libc.fgetattrlist
_fgetattrlist.argtypes = [c_int, POINTER(attrlist), c_void_p, c_ulong, c_ulong]
_fgetattrlist.restype = c_int

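# On x86-64 Darwin the '$INODE64' symbols are the variants that fill in the
# 64-bit-inode struct statfs declared above; where the suffixed symbol does
# not exist (e.g. arm64, where 64-bit inodes are the default), fall back to
# the plain name.
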
try:
    _statfs = libc['statfs$INODE64']
except (KeyError, AttributeError):
    _statfs = libc['statfs']

_statfs.argtypes = [c_char_p, POINTER(struct_statfs)]
_statfs.restype = c_int

try:
    _fstatfs = libc['fstatfs$INODE64']
except (KeyError, AttributeError):
    _fstatfs = libc['fstatfs']

_fstatfs.argtypes = [c_int, POINTER(struct_statfs)]
_fstatfs.restype = c_int

def _datetime_from_timespec(ts):
    td = datetime.timedelta(seconds=ts.tv_sec + 1.0e-9 * ts.tv_nsec)
    return unix_epoch + td

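# e.g. a timespec of (tv_sec=0, tv_nsec=500000000) decodes to half a second
# past the Unix epoch, as a timezone-aware UTC datetime.
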
def _decode_utf8_nul(sz):
    nul = sz.find(b'\0')
    if nul > -1:
        sz = sz[:nul]
    return sz.decode('utf-8')

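# e.g. _decode_utf8_nul(b'Macintosh HD\0\0\0') == 'Macintosh HD'
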
def _decode_attrlist_result(buf, attrs, options):
    result = []

    assert len(buf) >= 4
    total_size = uint32_t.from_buffer(buf, 0).value
    assert total_size <= len(buf)

    offset = 4

    # Common attributes
    if attrs[0] & ATTR_CMN_RETURNED_ATTRS:
        a = attribute_set_t.from_buffer(buf, offset)
        result.append(a)
        offset += sizeof(attribute_set_t)
        if not (options & FSOPT_PACK_INVAL_ATTRS):
            attrs = [a.commonattr, a.volattr, a.dirattr, a.fileattr, a.forkattr]
    if attrs[0] & ATTR_CMN_NAME:
        a = attrreference_t.from_buffer(buf, offset)
        ofs = offset + a.attr_dataoffset
        name = _decode_utf8_nul(buf[ofs:ofs + a.attr_length])
        offset += sizeof(attrreference_t)
        result.append(name)
    if attrs[0] & ATTR_CMN_DEVID:
        a = dev_t.from_buffer(buf, offset)
        offset += sizeof(dev_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_FSID:
        a = fsid_t.from_buffer(buf, offset)
        offset += sizeof(fsid_t)
        result.append(a)
    if attrs[0] & ATTR_CMN_OBJTYPE:
        a = fsobj_type_t.from_buffer(buf, offset)
        offset += sizeof(fsobj_type_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_OBJTAG:
        a = fsobj_tag_t.from_buffer(buf, offset)
        offset += sizeof(fsobj_tag_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_OBJID:
        a = fsobj_id_t.from_buffer(buf, offset)
        offset += sizeof(fsobj_id_t)
        result.append(a)
    if attrs[0] & ATTR_CMN_OBJPERMANENTID:
        a = fsobj_id_t.from_buffer(buf, offset)
        offset += sizeof(fsobj_id_t)
        result.append(a)
    if attrs[0] & ATTR_CMN_PAROBJID:
        a = fsobj_id_t.from_buffer(buf, offset)
        offset += sizeof(fsobj_id_t)
        result.append(a)
    if attrs[0] & ATTR_CMN_SCRIPT:
        a = text_encoding_t.from_buffer(buf, offset)
        offset += sizeof(text_encoding_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_CRTIME:
        a = timespec.from_buffer(buf, offset)
        offset += sizeof(timespec)
        result.append(_datetime_from_timespec(a))
    if attrs[0] & ATTR_CMN_MODTIME:
        a = timespec.from_buffer(buf, offset)
        offset += sizeof(timespec)
        result.append(_datetime_from_timespec(a))
    if attrs[0] & ATTR_CMN_CHGTIME:
        a = timespec.from_buffer(buf, offset)
        offset += sizeof(timespec)
        result.append(_datetime_from_timespec(a))
    if attrs[0] & ATTR_CMN_ACCTIME:
        a = timespec.from_buffer(buf, offset)
        offset += sizeof(timespec)
        result.append(_datetime_from_timespec(a))
    if attrs[0] & ATTR_CMN_BKUPTIME:
        a = timespec.from_buffer(buf, offset)
        offset += sizeof(timespec)
        result.append(_datetime_from_timespec(a))
    if attrs[0] & ATTR_CMN_FNDRINFO:
        a = FinderInfo.from_buffer(buf, offset)
        offset += sizeof(FinderInfo)
        result.append(a)
    if attrs[0] & ATTR_CMN_OWNERID:
        a = uid_t.from_buffer(buf, offset)
        offset += sizeof(uid_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_GRPID:
        a = gid_t.from_buffer(buf, offset)
        offset += sizeof(gid_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_ACCESSMASK:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_FLAGS:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_GEN_COUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_DOCUMENT_ID:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_USERACCESS:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_EXTENDED_SECURITY:
        a = attrreference_t.from_buffer(buf, offset)
        ofs = offset + a.attr_dataoffset
        offset += sizeof(attrreference_t)
        ec = uint32_t.from_buffer(buf, ofs + 36).value
        class kauth_acl(Structure):
            _fields_ = [('acl_entrycount', c_uint),
                        ('acl_flags', c_uint),
                        ('acl_ace', kauth_ace * ec)]
        class kauth_filesec(Structure):
            _fields_ = [('fsec_magic', c_uint),
                        ('fsec_owner', guid_t),
                        ('fsec_group', guid_t),
                        ('fsec_acl', kauth_acl)]
        a = kauth_filesec.from_buffer(buf, ofs)
        result.append(a)
    if attrs[0] & ATTR_CMN_UUID:
        result.append(uuid.UUID(bytes=buf[offset:offset + 16]))
        offset += sizeof(guid_t)
    if attrs[0] & ATTR_CMN_GRPUUID:
        result.append(uuid.UUID(bytes=buf[offset:offset + 16]))
        offset += sizeof(guid_t)
    if attrs[0] & ATTR_CMN_FILEID:
        a = uint64_t.from_buffer(buf, offset)
        offset += sizeof(uint64_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_PARENTID:
        a = uint64_t.from_buffer(buf, offset)
        offset += sizeof(uint64_t)
        result.append(a.value)
    if attrs[0] & ATTR_CMN_FULLPATH:
        a = attrreference_t.from_buffer(buf, offset)
        ofs = offset + a.attr_dataoffset
        path = _decode_utf8_nul(buf[ofs:ofs + a.attr_length])
        offset += sizeof(attrreference_t)
        result.append(path)
    if attrs[0] & ATTR_CMN_ADDEDTIME:
        a = timespec.from_buffer(buf, offset)
        offset += sizeof(timespec)
        result.append(_datetime_from_timespec(a))
    if attrs[0] & ATTR_CMN_DATA_PROTECT_FLAGS:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)

    # Volume attributes
    if attrs[1] & ATTR_VOL_FSTYPE:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_SIGNATURE:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_SIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_SPACEFREE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_SPACEAVAIL:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_MINALLOCATION:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_ALLOCATIONCLUMP:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_IOBLOCKSIZE:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_OBJCOUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_FILECOUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_DIRCOUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_MAXOBJCOUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_MOUNTPOINT:
        a = attrreference_t.from_buffer(buf, offset)
        ofs = offset + a.attr_dataoffset
        path = _decode_utf8_nul(buf[ofs:ofs + a.attr_length])
        offset += sizeof(attrreference_t)
        result.append(path)
    if attrs[1] & ATTR_VOL_NAME:
        a = attrreference_t.from_buffer(buf, offset)
        ofs = offset + a.attr_dataoffset
        name = _decode_utf8_nul(buf[ofs:ofs + a.attr_length])
        offset += sizeof(attrreference_t)
        result.append(name)
    if attrs[1] & ATTR_VOL_MOUNTFLAGS:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_MOUNTEDDEVICE:
        a = attrreference_t.from_buffer(buf, offset)
        ofs = offset + a.attr_dataoffset
        path = _decode_utf8_nul(buf[ofs:ofs + a.attr_length])
        offset += sizeof(attrreference_t)
        result.append(path)
    if attrs[1] & ATTR_VOL_ENCODINGSUSED:
        a = c_ulonglong.from_buffer(buf, offset)
        offset += sizeof(c_ulonglong)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_CAPABILITIES:
        a = vol_capabilities_attr_t.from_buffer(buf, offset)
        offset += sizeof(vol_capabilities_attr_t)
        result.append(a)
    if attrs[1] & ATTR_VOL_UUID:
        result.append(uuid.UUID(bytes=buf[offset:offset + 16]))
        offset += sizeof(uuid_t)
    if attrs[1] & ATTR_VOL_QUOTA_SIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_RESERVED_SIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[1] & ATTR_VOL_ATTRIBUTES:
        a = vol_attributes_attr_t.from_buffer(buf, offset)
        offset += sizeof(vol_attributes_attr_t)
        result.append(a)

    # Directory attributes
    if attrs[2] & ATTR_DIR_LINKCOUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[2] & ATTR_DIR_ENTRYCOUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[2] & ATTR_DIR_MOUNTSTATUS:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[2] & ATTR_DIR_ALLOCSIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[2] & ATTR_DIR_IOBLOCKSIZE:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[2] & ATTR_DIR_DATALENGTH:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)

    # File attributes
    if attrs[3] & ATTR_FILE_LINKCOUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_TOTALSIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_ALLOCSIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_IOBLOCKSIZE:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_CLUMPSIZE:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_DEVTYPE:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_FILETYPE:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_FORKCOUNT:
        a = uint32_t.from_buffer(buf, offset)
        offset += sizeof(uint32_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_DATALENGTH:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_DATAALLOCSIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_DATAEXTENTS:
        a = extentrecord.from_buffer(buf, offset)
        offset += sizeof(extentrecord)
        result.append(a)  # ctypes arrays have no .value; append the record itself
    if attrs[3] & ATTR_FILE_RSRCLENGTH:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_RSRCALLOCSIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[3] & ATTR_FILE_RSRCEXTENTS:
        a = extentrecord.from_buffer(buf, offset)
        offset += sizeof(extentrecord)
        result.append(a)  # likewise, append the extent array itself

    # Fork attributes
    if attrs[4] & ATTR_FORK_TOTALSIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[4] & ATTR_FORK_ALLOCSIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)

    # Extended common attributes
    if attrs[4] & ATTR_CMNEXT_RELPATH:
        a = attrreference_t.from_buffer(buf, offset)
        ofs = offset + a.attr_dataoffset
        path = _decode_utf8_nul(buf[ofs:ofs + a.attr_length])
        offset += sizeof(attrreference_t)
        result.append(path)
    if attrs[4] & ATTR_CMNEXT_PRIVATESIZE:
        a = off_t.from_buffer(buf, offset)
        offset += sizeof(off_t)
        result.append(a.value)
    if attrs[4] & ATTR_CMNEXT_LINKID:
        a = uint64_t.from_buffer(buf, offset)
        offset += sizeof(uint64_t)
        result.append(a.value)
    if attrs[4] & ATTR_CMNEXT_NOFIRMLINKPATH:
        a = attrreference_t.from_buffer(buf, offset)
        ofs = offset + a.attr_dataoffset
        path = _decode_utf8_nul(buf[ofs:ofs + a.attr_length])
        offset += sizeof(attrreference_t)
        result.append(path)
    if attrs[4] & ATTR_CMNEXT_REALDEVID:
        a = dev_t.from_buffer(buf, offset)
        offset += sizeof(dev_t)
        result.append(a.value)
    if attrs[4] & ATTR_CMNEXT_REALFSID:
        a = fsid_t.from_buffer(buf, offset)
        offset += sizeof(fsid_t)
        result.append(a)  # fsid_t is a Structure and has no .value
    if attrs[4] & ATTR_CMNEXT_CLONEID:
        a = uint64_t.from_buffer(buf, offset)
        offset += sizeof(uint64_t)
        result.append(a.value)
    if attrs[4] & ATTR_CMNEXT_EXT_FLAGS:
        a = uint64_t.from_buffer(buf, offset)
        offset += sizeof(uint64_t)
        result.append(a.value)

    return result

# Sadly, ctypes.get_errno() seems not to work
__error = libc.__error
__error.restype = POINTER(c_int)

def _get_errno():
    return __error().contents.value

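# (On Darwin, the C-level errno is a macro that dereferences __error(), so
# reading __error().contents.value retrieves the calling thread's errno.)
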
def getattrlist(path, attrs, options):
    if not isinstance(path, bytes):
        path = path.encode('utf-8')
    attrs = list(attrs)
    if attrs[1]:
        attrs[1] |= ATTR_VOL_INFO
    alist = attrlist(bitmapcount=5,
                     commonattr=attrs[0],
                     volattr=attrs[1],
                     dirattr=attrs[2],
                     fileattr=attrs[3],
                     forkattr=attrs[4])

    bufsize = _attrbuf_size(attrs)
    buf = create_string_buffer(bufsize)

    ret = _getattrlist(path, byref(alist), buf, bufsize,
                       options | FSOPT_REPORT_FULLSIZE)

    if ret < 0:
        err = _get_errno()
        raise OSError(err, os.strerror(err), path)

    return _decode_attrlist_result(buf, attrs, options)

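# A minimal usage sketch (macOS only; values are illustrative).  Results
# come back in the same order as the attribute bits, lowest bit first:
#
#     objtype, modified = getattrlist(
#         '/etc', [ATTR_CMN_OBJTYPE | ATTR_CMN_MODTIME, 0, 0, 0, 0], 0)
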
def fgetattrlist(fd, attrs, options):
    if hasattr(fd, 'fileno'):
        fd = fd.fileno()
    attrs = list(attrs)
    if attrs[1]:
        attrs[1] |= ATTR_VOL_INFO
    alist = attrlist(bitmapcount=5,
                     commonattr=attrs[0],
                     volattr=attrs[1],
                     dirattr=attrs[2],
                     fileattr=attrs[3],
                     forkattr=attrs[4])

    bufsize = _attrbuf_size(attrs)
    buf = create_string_buffer(bufsize)

    ret = _fgetattrlist(fd, byref(alist), buf, bufsize,
                        options | FSOPT_REPORT_FULLSIZE)

    if ret < 0:
        err = _get_errno()
        raise OSError(err, os.strerror(err))

    return _decode_attrlist_result(buf, attrs, options)

def statfs(path):
    if not isinstance(path, bytes):
        path = path.encode('utf-8')
    result = struct_statfs()
    ret = _statfs(path, byref(result))
    if ret < 0:
        err = _get_errno()
        raise OSError(err, os.strerror(err), path)
    return result

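# e.g. statfs('/').f_fstypename is the filesystem name as bytes (b'apfs' on
# a current macOS system); f_mntonname and f_mntfromname are likewise
# NUL-terminated C strings exposed as bytes.
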
def fstatfs(fd):
    if hasattr(fd, 'fileno'):
        fd = fd.fileno()
    result = struct_statfs()
    ret = _fstatfs(fd, byref(result))
    if ret < 0:
        err = _get_errno()
        raise OSError(err, os.strerror(err))
    return result
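
if __name__ == '__main__':
    # Smoke-test sketch (assumes macOS): ask for the root volume's name and
    # free space.  Results follow attribute-bit order, so ATTR_VOL_SPACEFREE
    # (0x8) decodes before ATTR_VOL_NAME (0x2000).
    free, name = getattrlist('/', [0, ATTR_VOL_SPACEFREE | ATTR_VOL_NAME,
                                   0, 0, 0], 0)
    print('Volume %r has %d bytes free' % (name, free))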
18
mc_test/node_modules/dmg-builder/vendor/mac_alias/utils.py
generated
vendored
Executable file
18
mc_test/node_modules/dmg-builder/vendor/mac_alias/utils.py
generated
vendored
Executable file
@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import datetime

ZERO = datetime.timedelta(0)
class UTC(datetime.tzinfo):
    def utcoffset(self, dt):
        return ZERO
    def dst(self, dt):
        return ZERO
    def tzname(self, dt):
        return 'UTC'

utc = UTC()
mac_epoch = datetime.datetime(1904, 1, 1, 0, 0, 0, 0, utc)
unix_epoch = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, utc)
osx_epoch = datetime.datetime(2001, 1, 1, 0, 0, 0, 0, utc)
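
# Apple on-disk timestamps count seconds from one of these epochs; for
# instance, an HFS+ date of n seconds converts with
# mac_epoch + datetime.timedelta(seconds=n), giving a timezone-aware UTC
# datetime.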