stuff
This commit is contained in:
977
buildfiles/node_modules/dmg-builder/vendor/biplist/__init__.py
generated
vendored
Normal file
977
buildfiles/node_modules/dmg-builder/vendor/biplist/__init__.py
generated
vendored
Normal file
@@ -0,0 +1,977 @@
|
||||
"""biplist -- a library for reading and writing binary property list files.
|
||||
|
||||
Binary Property List (plist) files provide a faster and smaller serialization
|
||||
format for property lists on OS X. This is a library for generating binary
|
||||
plists which can be read by OS X, iOS, or other clients.
|
||||
|
||||
The API models the plistlib API, and will call through to plistlib when
|
||||
XML serialization or deserialization is required.
|
||||
|
||||
To generate plists with UID values, wrap the values with the Uid object. The
|
||||
value must be an int.
|
||||
|
||||
To generate plists with NSData/CFData values, wrap the values with the
|
||||
Data object. The value must be a string.
|
||||
|
||||
Date values can only be datetime.datetime objects.
|
||||
|
||||
The exceptions InvalidPlistException and NotBinaryPlistException may be
|
||||
thrown to indicate that the data cannot be serialized or deserialized as
|
||||
a binary plist.
|
||||
|
||||
Plist generation example:
|
||||
|
||||
from biplist import *
|
||||
from datetime import datetime
|
||||
plist = {'aKey':'aValue',
|
||||
'0':1.322,
|
||||
'now':datetime.now(),
|
||||
'list':[1,2,3],
|
||||
'tuple':('a','b','c')
|
||||
}
|
||||
try:
|
||||
writePlist(plist, "example.plist")
|
||||
except (InvalidPlistException, NotBinaryPlistException), e:
|
||||
print "Something bad happened:", e
|
||||
|
||||
Plist parsing example:
|
||||
|
||||
from biplist import *
|
||||
try:
|
||||
plist = readPlist("example.plist")
|
||||
print plist
|
||||
except (InvalidPlistException, NotBinaryPlistException), e:
|
||||
print "Not a plist:", e
|
||||
"""
|
||||
|
||||
from collections import namedtuple
|
||||
import datetime
|
||||
import io
|
||||
import math
|
||||
import plistlib
|
||||
from struct import pack, unpack, unpack_from
|
||||
from struct import error as struct_error
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
unicode
|
||||
unicodeEmpty = r''
|
||||
except NameError:
|
||||
unicode = str
|
||||
unicodeEmpty = ''
|
||||
try:
|
||||
long
|
||||
except NameError:
|
||||
long = int
|
||||
try:
|
||||
{}.iteritems
|
||||
iteritems = lambda x: x.iteritems()
|
||||
except AttributeError:
|
||||
iteritems = lambda x: x.items()
|
||||
|
||||
__all__ = [
|
||||
'Uid', 'Data', 'readPlist', 'writePlist', 'readPlistFromString',
|
||||
'writePlistToString', 'InvalidPlistException', 'NotBinaryPlistException'
|
||||
]
|
||||
|
||||
# Apple uses Jan 1, 2001 as a base for all plist date/times.
|
||||
apple_reference_date = datetime.datetime.utcfromtimestamp(978307200)
|
||||
|
||||
class Uid(object):
    """Integer wrapper marking a value as a plist UID.

    UIDs appear in keyed-archive plists (e.g. NSKeyedArchiver output);
    wrapping the int distinguishes it from a plain integer when writing.
    """

    # Default UID value when none is supplied to __init__.
    integer = 0

    def __init__(self, integer):
        self.integer = integer

    def __repr__(self):
        return "Uid(%d)" % self.integer

    def __eq__(self, other):
        # Only two Uid instances can be equal; a bare int never matches.
        return isinstance(self, Uid) and isinstance(other, Uid) and self.integer == other.integer

    def __cmp__(self, other):
        # Python 2 ordering hook; ignored on Python 3.
        return self.integer - other.integer

    def __lt__(self, other):
        return self.integer < other.integer

    def __hash__(self):
        # Hash like the underlying integer so equal Uids collide correctly.
        return self.integer

    def __int__(self):
        return int(self.integer)
|
||||
|
||||
class Data(bytes):
    """bytes subclass that tags a value as NSData/CFData for serialization."""
|
||||
|
||||
class InvalidPlistException(Exception):
    """Signals that plist data is malformed and cannot be (de)serialized."""
|
||||
|
||||
class NotBinaryPlistException(Exception):
    """Signals that data expected to be a binary plist was not one."""
|
||||
|
||||
def readPlist(pathOrFile):
    """Parse a plist from a filesystem path or an open binary stream.

    The binary-plist parser is tried first; if the stream is not a
    binary plist, falls back to plistlib for XML plists.
    Raises NotBinaryPlistException, InvalidPlistException.
    """
    result = None
    didOpen = False
    # Accept a path as well as an already-open file object.
    if isinstance(pathOrFile, (bytes, unicode)):
        pathOrFile = open(pathOrFile, 'rb')
        didOpen = True
    try:
        try:
            result = PlistReader(pathOrFile).parse()
        except NotBinaryPlistException as e:
            # Not binary -- retry via plistlib (XML form).
            try:
                pathOrFile.seek(0)
                result = None
                if hasattr(plistlib, 'loads'):
                    contents = None
                    if isinstance(pathOrFile, (bytes, unicode)):
                        with open(pathOrFile, 'rb') as f:
                            contents = f.read()
                    else:
                        contents = pathOrFile.read()
                    result = plistlib.loads(contents)
                else:
                    result = plistlib.readPlist(pathOrFile)
                result = wrapDataObject(result, for_binary=True)
            except Exception as e:
                raise InvalidPlistException(e)
    finally:
        # Only close streams this function opened itself.
        if didOpen:
            pathOrFile.close()
    return result
|
||||
|
||||
def wrapDataObject(o, for_binary=False):
    """Recursively convert data values for the target serializer.

    For XML output (for_binary=False), biplist Data becomes
    plistlib.Data on Pythons whose plistlib still requires it.  For
    binary output, plistlib.Data becomes biplist Data.  Containers are
    walked recursively; lists and dicts are mutated in place, tuples
    are rebuilt.
    """
    # plistlib.Data was deprecated in 3.4 and removed in Python 3.9;
    # a direct attribute access would raise AttributeError there.
    plistlibData = getattr(plistlib, 'Data', None)
    if isinstance(o, Data) and not for_binary:
        v = sys.version_info
        # plistlib >= 3.4 handles bytes natively; older versions need Data.
        if not (v[0] >= 3 and v[1] >= 4):
            o = plistlibData(o)
    elif for_binary and (isinstance(o, bytes) or
                         (plistlibData is not None and isinstance(o, plistlibData))):
        # Only plistlib.Data has a .data attribute; plain bytes pass through.
        if hasattr(o, 'data'):
            o = Data(o.data)
    elif isinstance(o, tuple):
        o = wrapDataObject(list(o), for_binary)
        o = tuple(o)
    elif isinstance(o, list):
        # Mutate in place to preserve the caller's list identity.
        for i in range(len(o)):
            o[i] = wrapDataObject(o[i], for_binary)
    elif isinstance(o, dict):
        for k in o:
            o[k] = wrapDataObject(o[k], for_binary)
    return o
|
||||
|
||||
def writePlist(rootObject, pathOrFile, binary=True):
    """Write rootObject as a plist to a path or open binary stream.

    binary=True (default) serializes with PlistWriter; otherwise the
    object is handed to plistlib for XML output.
    """
    if not binary:
        # XML path: delegate to plistlib, newest API first.
        rootObject = wrapDataObject(rootObject, binary)
        if hasattr(plistlib, "dump"):
            if isinstance(pathOrFile, (bytes, unicode)):
                with open(pathOrFile, 'wb') as f:
                    return plistlib.dump(rootObject, f)
            return plistlib.dump(rootObject, pathOrFile)
        return plistlib.writePlist(rootObject, pathOrFile)
    # Binary path: open the file ourselves if given a path.
    didOpen = False
    if isinstance(pathOrFile, (bytes, unicode)):
        pathOrFile = open(pathOrFile, 'wb')
        didOpen = True
    writer = PlistWriter(pathOrFile)
    result = writer.writeRoot(rootObject)
    if didOpen:
        pathOrFile.close()
    return result
|
||||
|
||||
def readPlistFromString(data):
    """Parse a plist from an in-memory bytes object."""
    stream = io.BytesIO(data)
    return readPlist(stream)
|
||||
|
||||
def writePlistToString(rootObject, binary=True):
    """Serialize rootObject to bytes, binary or XML per the flag."""
    if not binary:
        # XML path: use the newest plistlib API available.
        rootObject = wrapDataObject(rootObject, binary)
        if hasattr(plistlib, "dumps"):
            return plistlib.dumps(rootObject)
        if hasattr(plistlib, "writePlistToBytes"):
            return plistlib.writePlistToBytes(rootObject)
        return plistlib.writePlistToString(rootObject)
    # Binary path: serialize into an in-memory buffer.
    buf = io.BytesIO()
    PlistWriter(buf).writeRoot(rootObject)
    return buf.getvalue()
|
||||
|
||||
def is_stream_binary_plist(stream):
    """Return True when the stream begins with the binary-plist magic."""
    stream.seek(0)
    # All binary plist versions share the 'bplist0' prefix.
    return stream.read(7) == b'bplist0'
|
||||
|
||||
PlistTrailer = namedtuple('PlistTrailer', 'offsetSize, objectRefSize, offsetCount, topLevelObjectNumber, offsetTableOffset')
|
||||
PlistByteCounts = namedtuple('PlistByteCounts', 'nullBytes, boolBytes, intBytes, realBytes, dateBytes, dataBytes, stringBytes, uidBytes, arrayBytes, setBytes, dictBytes')
|
||||
|
||||
class PlistReader(object):
    """Parses a binary plist from an open binary file-like object.

    Usage: PlistReader(fileOrStream).parse().  Raises
    NotBinaryPlistException when the stream lacks the bplist magic and
    InvalidPlistException for structurally invalid data.
    """
    # Stream the plist is read from (set in __init__).
    file = None
    # Entire raw bytes of the plist once readRoot() has loaded them.
    contents = ''
    # Offset table: byte position of each object, indexed by object number.
    offsets = None
    # PlistTrailer namedtuple decoded from the final 32 bytes.
    trailer = None
    # Cursor into `contents` while decoding objects.
    currentOffset = 0
    # Used to detect recursive object references.
    offsetsStack = []

    def __init__(self, fileOrStream):
        """Raises NotBinaryPlistException."""
        self.reset()
        self.file = fileOrStream

    def parse(self):
        # Entry point: decode and return the root object.
        return self.readRoot()

    def reset(self):
        # Clear all per-parse state so the reader can be reused.
        self.trailer = None
        self.contents = ''
        self.offsets = []
        self.currentOffset = 0
        self.offsetsStack = []

    def readRoot(self):
        """Validate the trailer, decode the offset table, then decode and
        return the top-level object."""
        result = None
        self.reset()
        # Get the header, make sure it's a valid file.
        if not is_stream_binary_plist(self.file):
            raise NotBinaryPlistException()
        self.file.seek(0)
        self.contents = self.file.read()
        if len(self.contents) < 32:
            raise InvalidPlistException("File is too short.")
        # The fixed-size trailer is always the last 32 bytes of the file.
        trailerContents = self.contents[-32:]
        try:
            self.trailer = PlistTrailer._make(unpack("!xxxxxxBBQQQ", trailerContents))

            # Sanity-check trailer fields against each other and the file
            # size before trusting any of them (guards against malformed
            # or hostile input).
            if pow(2, self.trailer.offsetSize*8) < self.trailer.offsetTableOffset:
                raise InvalidPlistException("Offset size insufficient to reference all objects.")

            if pow(2, self.trailer.objectRefSize*8) < self.trailer.offsetCount:
                raise InvalidPlistException("Too many offsets to represent in size of object reference representation.")

            offset_size = self.trailer.offsetSize * self.trailer.offsetCount
            offset = self.trailer.offsetTableOffset

            if offset + offset_size > pow(2, 64):
                raise InvalidPlistException("Offset table is excessively long.")

            if self.trailer.offsetSize > 16:
                raise InvalidPlistException("Offset size is greater than maximum integer size.")

            if self.trailer.objectRefSize == 0:
                raise InvalidPlistException("Object reference size is zero.")

            if offset >= len(self.contents) - 32:
                raise InvalidPlistException("Offset table offset is too large.")

            if offset < len("bplist00x"):
                raise InvalidPlistException("Offset table offset is too small.")

            if self.trailer.topLevelObjectNumber >= self.trailer.offsetCount:
                raise InvalidPlistException("Top level object number is larger than the number of objects.")

            # Decode the offset table into self.offsets: one fixed-width
            # big-endian integer per object.
            offset_contents = self.contents[offset:offset+offset_size]
            offset_i = 0
            offset_table_length = len(offset_contents)

            while offset_i < self.trailer.offsetCount:
                begin = self.trailer.offsetSize*offset_i
                end = begin+self.trailer.offsetSize
                if end > offset_table_length:
                    raise InvalidPlistException("End of object is at invalid offset %d in offset table of length %d" % (end, offset_table_length))
                tmp_contents = offset_contents[begin:end]
                tmp_sized = self.getSizedInteger(tmp_contents, self.trailer.offsetSize)
                self.offsets.append(tmp_sized)
                offset_i += 1
            self.setCurrentOffsetToObjectNumber(self.trailer.topLevelObjectNumber)
            result = self.readObject()
        except TypeError as e:
            raise InvalidPlistException(e)
        return result

    def setCurrentOffsetToObjectNumber(self, objectNumber):
        # Move the cursor to the start of the given object, guarding
        # against out-of-range object numbers and reference cycles.
        if objectNumber > len(self.offsets) - 1:
            raise InvalidPlistException("Invalid offset number: %d" % objectNumber)
        self.currentOffset = self.offsets[objectNumber]
        if self.currentOffset in self.offsetsStack:
            raise InvalidPlistException("Recursive data structure detected in object: %d" % objectNumber)

    def beginOffsetProtection(self):
        # Push the current offset onto the recursion-detection stack and
        # return it so the caller can pop it in endOffsetProtection().
        self.offsetsStack.append(self.currentOffset)
        return self.currentOffset

    def endOffsetProtection(self, offset):
        # Pop the given offset (and anything pushed after it) off the
        # recursion-detection stack; ignore offsets already removed.
        try:
            index = self.offsetsStack.index(offset)
            self.offsetsStack = self.offsetsStack[:index]
        except ValueError as e:
            pass

    def readObject(self):
        """Decode the object at currentOffset and return its Python value.

        The first byte is a marker: the high nibble selects the type and
        the low nibble holds either the value (bool/null) or a size.
        """
        protection = self.beginOffsetProtection()
        result = None
        tmp_byte = self.contents[self.currentOffset:self.currentOffset+1]
        if len(tmp_byte) != 1:
            raise InvalidPlistException("No object found at offset: %d" % self.currentOffset)
        marker_byte = unpack("!B", tmp_byte)[0]
        format = (marker_byte >> 4) & 0x0f
        extra = marker_byte & 0x0f
        self.currentOffset += 1

        def proc_extra(extra):
            # A low nibble of 0b1111 means the real size follows the
            # marker as a full integer object.
            if extra == 0b1111:
                extra = self.readObject()
            return extra

        # bool, null, or fill byte
        if format == 0b0000:
            if extra == 0b0000:
                result = None
            elif extra == 0b1000:
                result = False
            elif extra == 0b1001:
                result = True
            elif extra == 0b1111:
                pass # fill byte
            else:
                raise InvalidPlistException("Invalid object found at offset: %d" % (self.currentOffset - 1))
        # int
        elif format == 0b0001:
            result = self.readInteger(pow(2, extra))
        # real
        elif format == 0b0010:
            result = self.readReal(extra)
        # date
        elif format == 0b0011 and extra == 0b0011:
            result = self.readDate()
        # data
        elif format == 0b0100:
            extra = proc_extra(extra)
            result = self.readData(extra)
        # ascii string
        elif format == 0b0101:
            extra = proc_extra(extra)
            result = self.readAsciiString(extra)
        # Unicode string
        elif format == 0b0110:
            extra = proc_extra(extra)
            result = self.readUnicode(extra)
        # uid
        elif format == 0b1000:
            result = self.readUid(extra)
        # array
        elif format == 0b1010:
            extra = proc_extra(extra)
            result = self.readArray(extra)
        # set
        elif format == 0b1100:
            extra = proc_extra(extra)
            result = set(self.readArray(extra))
        # dict
        elif format == 0b1101:
            extra = proc_extra(extra)
            result = self.readDict(extra)
        else:
            raise InvalidPlistException("Invalid object found: {format: %s, extra: %s}" % (bin(format), bin(extra)))
        self.endOffsetProtection(protection)
        return result

    def readContents(self, length, description="Object contents"):
        # Slice `length` bytes at the cursor, refusing reads that would
        # run into the 32-byte trailer. Does not advance the cursor.
        end = self.currentOffset + length
        if end >= len(self.contents) - 32:
            raise InvalidPlistException("%s extends into trailer" % description)
        elif length < 0:
            raise InvalidPlistException("%s length is less than zero" % length)
        data = self.contents[self.currentOffset:end]
        return data

    def readInteger(self, byteSize):
        # Integers are big-endian; 8-byte values are signed (as_number).
        data = self.readContents(byteSize, "Integer")
        self.currentOffset = self.currentOffset + byteSize
        return self.getSizedInteger(data, byteSize, as_number=True)

    def readReal(self, length):
        # `length` is log2 of the byte size: 2 -> 4-byte float,
        # 3 -> 8-byte double.
        to_read = pow(2, length)
        data = self.readContents(to_read, "Real")
        if length == 2: # 4 bytes
            result = unpack('>f', data)[0]
        elif length == 3: # 8 bytes
            result = unpack('>d', data)[0]
        else:
            raise InvalidPlistException("Unknown Real of length %d bytes" % to_read)
        return result

    def readRefs(self, count):
        # Read `count` fixed-width object references at the cursor.
        refs = []
        i = 0
        while i < count:
            fragment = self.readContents(self.trailer.objectRefSize, "Object reference")
            ref = self.getSizedInteger(fragment, len(fragment))
            refs.append(ref)
            self.currentOffset += self.trailer.objectRefSize
            i += 1
        return refs

    def readArray(self, count):
        """Read `count` object references and decode each referenced value."""
        if not isinstance(count, (int, long)):
            raise InvalidPlistException("Count of entries in dict isn't of integer type.")
        result = []
        values = self.readRefs(count)
        i = 0
        while i < len(values):
            self.setCurrentOffsetToObjectNumber(values[i])
            value = self.readObject()
            result.append(value)
            i += 1
        return result

    def readDict(self, count):
        """Read `count` key references then `count` value references and
        decode both lists into a dict."""
        if not isinstance(count, (int, long)):
            raise InvalidPlistException("Count of keys/values in dict isn't of integer type.")
        result = {}
        keys = self.readRefs(count)
        values = self.readRefs(count)
        i = 0
        while i < len(keys):
            self.setCurrentOffsetToObjectNumber(keys[i])
            key = self.readObject()
            self.setCurrentOffsetToObjectNumber(values[i])
            value = self.readObject()
            result[key] = value
            i += 1
        return result

    def readAsciiString(self, length):
        # ASCII strings are stored one byte per character.
        if not isinstance(length, (int, long)):
            raise InvalidPlistException("Length of ASCII string isn't of integer type.")
        data = self.readContents(length, "ASCII string")
        result = unpack("!%ds" % length, data)[0]
        self.currentOffset += length
        return str(result.decode('ascii'))

    def readUnicode(self, length):
        # `length` counts UTF-16 code units, i.e. two bytes each.
        if not isinstance(length, (int, long)):
            raise InvalidPlistException("Length of Unicode string isn't of integer type.")
        actual_length = length*2
        data = self.readContents(actual_length, "Unicode string")
        self.currentOffset += actual_length
        return data.decode('utf_16_be')

    def readDate(self):
        # Dates are stored as a big-endian double of seconds relative to
        # the Apple epoch (Jan 1, 2001).
        data = self.readContents(8, "Date")
        x = unpack(">d", data)[0]
        if math.isnan(x):
            raise InvalidPlistException("Date is NaN")
        # Use timedelta to workaround time_t size limitation on 32-bit python.
        try:
            result = datetime.timedelta(seconds=x) + apple_reference_date
        except OverflowError:
            # Clamp out-of-range dates to datetime's limits rather than fail.
            if x > 0:
                result = datetime.datetime.max
            else:
                result = datetime.datetime.min
        self.currentOffset += 8
        return result

    def readData(self, length):
        # Raw data bytes, returned wrapped in Data.
        if not isinstance(length, (int, long)):
            raise InvalidPlistException("Length of data isn't of integer type.")
        result = self.readContents(length, "Data")
        self.currentOffset += length
        return Data(result)

    def readUid(self, length):
        # The marker nibble stores (byte count - 1) for the UID integer.
        if not isinstance(length, (int, long)):
            raise InvalidPlistException("Uid length isn't of integer type.")
        return Uid(self.readInteger(length+1))

    def getSizedInteger(self, data, byteSize, as_number=False):
        """Numbers of 8 bytes are signed integers when they refer to numbers, but unsigned otherwise."""
        result = 0
        if byteSize == 0:
            raise InvalidPlistException("Encountered integer with byte size of 0.")
        # 1, 2, and 4 byte integers are unsigned
        elif byteSize == 1:
            result = unpack('>B', data)[0]
        elif byteSize == 2:
            result = unpack('>H', data)[0]
        elif byteSize == 4:
            result = unpack('>L', data)[0]
        elif byteSize == 8:
            if as_number:
                result = unpack('>q', data)[0]
            else:
                result = unpack('>Q', data)[0]
        elif byteSize <= 16:
            # Handle odd-sized or integers larger than 8 bytes
            # Don't naively go over 16 bytes, in order to prevent infinite loops.
            result = 0
            if hasattr(int, 'from_bytes'):
                result = int.from_bytes(data, 'big')
            else:
                for byte in data:
                    if not isinstance(byte, int): # Python3.0-3.1.x return ints, 2.x return str
                        byte = unpack_from('>B', byte)[0]
                    result = (result << 8) | byte
        else:
            raise InvalidPlistException("Encountered integer longer than 16 bytes.")
        return result
|
||||
|
||||
class HashableWrapper(object):
    """Wraps an unhashable container (list/dict/set) so the writer can
    use it in hash-based bookkeeping; hashes by object identity."""

    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return "<HashableWrapper: %s>" % [self.value]
|
||||
|
||||
class BoolWrapper(object):
    """Wraps True/False so booleans stay distinguishable from the
    integers 1/0 during serialization."""

    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return "<BoolWrapper: %s>" % self.value
|
||||
|
||||
class FloatWrapper(object):
    """Interning wrapper for floats: FloatWrapper(x) always returns the
    same instance for equal x, so duplicate values share one object
    reference in the output."""

    # Cache of value -> wrapper instance shared by all constructions.
    _instances = {}

    def __new__(klass, value):
        # Ensure FloatWrapper(x) for a given float x is always the same object
        cached = klass._instances.get(value)
        if cached is None:
            cached = object.__new__(klass)
            cached.value = value
            klass._instances[value] = cached
        return cached

    def __repr__(self):
        return "<FloatWrapper: %s>" % self.value
|
||||
|
||||
class StringWrapper(object):
    """Interning wrapper for strings written to a binary plist.

    Chooses the narrowest usable encoding (ascii when possible, else
    utf_16_be) and guarantees one instance per encoded value, so equal
    strings share a single object reference in the output.

    Raises ValueError when the value cannot be encoded (e.g. it is not
    a string at all).
    """
    __instances = {}

    # Encoded bytes of the wrapped string.
    encodedValue = None
    # Codec used to produce encodedValue: 'ascii' or 'utf_16_be'.
    encoding = None

    def __new__(cls, value):
        '''Ensure we only have one instance for any string,
        and that we encode ascii as 1-byte-per character when possible'''

        encodedValue = None

        for encoding in ('ascii', 'utf_16_be'):
            try:
                encodedValue = value.encode(encoding)
            except (UnicodeEncodeError, AttributeError):
                # UnicodeEncodeError: value not representable in this codec.
                # AttributeError: value is not a string at all.
                # (Previously a bare `except:` -- that also swallowed
                # KeyboardInterrupt/SystemExit.)
                pass
            if encodedValue is not None:
                if encodedValue not in cls.__instances:
                    cls.__instances[encodedValue] = super(StringWrapper, cls).__new__(cls)
                    cls.__instances[encodedValue].encodedValue = encodedValue
                    cls.__instances[encodedValue].encoding = encoding
                return cls.__instances[encodedValue]

        raise ValueError('Unable to get ascii or utf_16_be encoding for %s' % repr(value))

    def __len__(self):
        '''Return roughly the number of characters in this string (half the byte length)'''
        if self.encoding == 'ascii':
            return len(self.encodedValue)
        else:
            return len(self.encodedValue)//2

    def __lt__(self, other):
        # Order by encoded bytes, matching the writer's sort of keys.
        return self.encodedValue < other.encodedValue

    @property
    def encodingMarker(self):
        """Marker nibble for the plist object header: 0b0101 for ascii,
        0b0110 for UTF-16."""
        if self.encoding == 'ascii':
            return 0b0101
        else:
            return 0b0110

    def __repr__(self):
        return '<StringWrapper (%s): %s>' % (self.encoding, self.encodedValue)
|
||||
|
||||
class PlistWriter(object):
|
||||
header = b'bplist00bybiplist1.0'
|
||||
file = None
|
||||
byteCounts = None
|
||||
trailer = None
|
||||
computedUniques = None
|
||||
writtenReferences = None
|
||||
referencePositions = None
|
||||
wrappedTrue = None
|
||||
wrappedFalse = None
|
||||
# Used to detect recursive object references.
|
||||
objectsStack = []
|
||||
|
||||
def __init__(self, file):
|
||||
self.reset()
|
||||
self.file = file
|
||||
self.wrappedTrue = BoolWrapper(True)
|
||||
self.wrappedFalse = BoolWrapper(False)
|
||||
|
||||
def reset(self):
|
||||
self.byteCounts = PlistByteCounts(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
|
||||
self.trailer = PlistTrailer(0, 0, 0, 0, 0)
|
||||
|
||||
# A set of all the uniques which have been computed.
|
||||
self.computedUniques = set()
|
||||
# A list of all the uniques which have been written.
|
||||
self.writtenReferences = {}
|
||||
# A dict of the positions of the written uniques.
|
||||
self.referencePositions = {}
|
||||
|
||||
self.objectsStack = []
|
||||
|
||||
def positionOfObjectReference(self, obj):
|
||||
"""If the given object has been written already, return its
|
||||
position in the offset table. Otherwise, return None."""
|
||||
return self.writtenReferences.get(obj)
|
||||
|
||||
def writeRoot(self, root):
|
||||
"""
|
||||
Strategy is:
|
||||
- write header
|
||||
- wrap root object so everything is hashable
|
||||
- compute size of objects which will be written
|
||||
- need to do this in order to know how large the object refs
|
||||
will be in the list/dict/set reference lists
|
||||
- write objects
|
||||
- keep objects in writtenReferences
|
||||
- keep positions of object references in referencePositions
|
||||
- write object references with the length computed previously
|
||||
- computer object reference length
|
||||
- write object reference positions
|
||||
- write trailer
|
||||
"""
|
||||
output = self.header
|
||||
wrapped_root = self.wrapRoot(root)
|
||||
self.computeOffsets(wrapped_root, asReference=True, isRoot=True)
|
||||
self.trailer = self.trailer._replace(**{'objectRefSize':self.intSize(len(self.computedUniques))})
|
||||
self.writeObjectReference(wrapped_root, output)
|
||||
output = self.writeObject(wrapped_root, output, setReferencePosition=True)
|
||||
|
||||
# output size at this point is an upper bound on how big the
|
||||
# object reference offsets need to be.
|
||||
self.trailer = self.trailer._replace(**{
|
||||
'offsetSize':self.intSize(len(output)),
|
||||
'offsetCount':len(self.computedUniques),
|
||||
'offsetTableOffset':len(output),
|
||||
'topLevelObjectNumber':0
|
||||
})
|
||||
|
||||
output = self.writeOffsetTable(output)
|
||||
output += pack('!xxxxxxBBQQQ', *self.trailer)
|
||||
self.file.write(output)
|
||||
|
||||
def beginRecursionProtection(self, obj):
|
||||
if not isinstance(obj, (set, dict, list, tuple)):
|
||||
return
|
||||
if id(obj) in self.objectsStack:
|
||||
raise InvalidPlistException("Recursive containers are not allowed in plists.")
|
||||
self.objectsStack.append(id(obj))
|
||||
|
||||
def endRecursionProtection(self, obj):
|
||||
if not isinstance(obj, (set, dict, list, tuple)):
|
||||
return
|
||||
try:
|
||||
index = self.objectsStack.index(id(obj))
|
||||
self.objectsStack = self.objectsStack[:index]
|
||||
except ValueError as e:
|
||||
pass
|
||||
|
||||
def wrapRoot(self, root):
|
||||
result = None
|
||||
self.beginRecursionProtection(root)
|
||||
|
||||
if isinstance(root, bool):
|
||||
if root is True:
|
||||
result = self.wrappedTrue
|
||||
else:
|
||||
result = self.wrappedFalse
|
||||
elif isinstance(root, float):
|
||||
result = FloatWrapper(root)
|
||||
elif isinstance(root, set):
|
||||
n = set()
|
||||
for value in root:
|
||||
n.add(self.wrapRoot(value))
|
||||
result = HashableWrapper(n)
|
||||
elif isinstance(root, dict):
|
||||
n = {}
|
||||
for key, value in iteritems(root):
|
||||
n[self.wrapRoot(key)] = self.wrapRoot(value)
|
||||
result = HashableWrapper(n)
|
||||
elif isinstance(root, list):
|
||||
n = []
|
||||
for value in root:
|
||||
n.append(self.wrapRoot(value))
|
||||
result = HashableWrapper(n)
|
||||
elif isinstance(root, tuple):
|
||||
n = tuple([self.wrapRoot(value) for value in root])
|
||||
result = HashableWrapper(n)
|
||||
elif isinstance(root, (str, unicode)) and not isinstance(root, Data):
|
||||
result = StringWrapper(root)
|
||||
elif isinstance(root, bytes):
|
||||
result = Data(root)
|
||||
else:
|
||||
result = root
|
||||
|
||||
self.endRecursionProtection(root)
|
||||
return result
|
||||
|
||||
def incrementByteCount(self, field, incr=1):
|
||||
self.byteCounts = self.byteCounts._replace(**{field:self.byteCounts.__getattribute__(field) + incr})
|
||||
|
||||
def computeOffsets(self, obj, asReference=False, isRoot=False):
|
||||
def check_key(key):
|
||||
if key is None:
|
||||
raise InvalidPlistException('Dictionary keys cannot be null in plists.')
|
||||
elif isinstance(key, Data):
|
||||
raise InvalidPlistException('Data cannot be dictionary keys in plists.')
|
||||
elif not isinstance(key, StringWrapper):
|
||||
raise InvalidPlistException('Keys must be strings.')
|
||||
|
||||
def proc_size(size):
|
||||
if size > 0b1110:
|
||||
size += self.intSize(size)
|
||||
return size
|
||||
# If this should be a reference, then we keep a record of it in the
|
||||
# uniques table.
|
||||
if asReference:
|
||||
if obj in self.computedUniques:
|
||||
return
|
||||
else:
|
||||
self.computedUniques.add(obj)
|
||||
|
||||
if obj is None:
|
||||
self.incrementByteCount('nullBytes')
|
||||
elif isinstance(obj, BoolWrapper):
|
||||
self.incrementByteCount('boolBytes')
|
||||
elif isinstance(obj, Uid):
|
||||
size = self.intSize(obj.integer)
|
||||
self.incrementByteCount('uidBytes', incr=1+size)
|
||||
elif isinstance(obj, (int, long)):
|
||||
size = self.intSize(obj)
|
||||
self.incrementByteCount('intBytes', incr=1+size)
|
||||
elif isinstance(obj, FloatWrapper):
|
||||
size = self.realSize(obj)
|
||||
self.incrementByteCount('realBytes', incr=1+size)
|
||||
elif isinstance(obj, datetime.datetime):
|
||||
self.incrementByteCount('dateBytes', incr=2)
|
||||
elif isinstance(obj, Data):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('dataBytes', incr=1+size)
|
||||
elif isinstance(obj, StringWrapper):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('stringBytes', incr=1+size)
|
||||
elif isinstance(obj, HashableWrapper):
|
||||
obj = obj.value
|
||||
if isinstance(obj, set):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('setBytes', incr=1+size)
|
||||
for value in obj:
|
||||
self.computeOffsets(value, asReference=True)
|
||||
elif isinstance(obj, (list, tuple)):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('arrayBytes', incr=1+size)
|
||||
for value in obj:
|
||||
asRef = True
|
||||
self.computeOffsets(value, asReference=True)
|
||||
elif isinstance(obj, dict):
|
||||
size = proc_size(len(obj))
|
||||
self.incrementByteCount('dictBytes', incr=1+size)
|
||||
for key, value in iteritems(obj):
|
||||
check_key(key)
|
||||
self.computeOffsets(key, asReference=True)
|
||||
self.computeOffsets(value, asReference=True)
|
||||
else:
|
||||
raise InvalidPlistException("Unknown object type: %s (%s)" % (type(obj).__name__, repr(obj)))
|
||||
|
||||
def writeObjectReference(self, obj, output):
|
||||
"""Tries to write an object reference, adding it to the references
|
||||
table. Does not write the actual object bytes or set the reference
|
||||
position. Returns a tuple of whether the object was a new reference
|
||||
(True if it was, False if it already was in the reference table)
|
||||
and the new output.
|
||||
"""
|
||||
position = self.positionOfObjectReference(obj)
|
||||
if position is None:
|
||||
self.writtenReferences[obj] = len(self.writtenReferences)
|
||||
output += self.binaryInt(len(self.writtenReferences) - 1, byteSize=self.trailer.objectRefSize)
|
||||
return (True, output)
|
||||
else:
|
||||
output += self.binaryInt(position, byteSize=self.trailer.objectRefSize)
|
||||
return (False, output)
|
||||
|
||||
def writeObject(self, obj, output, setReferencePosition=False):
|
||||
"""Serializes the given object to the output. Returns output.
|
||||
If setReferencePosition is True, will set the position the
|
||||
object was written.
|
||||
"""
|
||||
def proc_variable_length(format, length):
|
||||
result = b''
|
||||
if length > 0b1110:
|
||||
result += pack('!B', (format << 4) | 0b1111)
|
||||
result = self.writeObject(length, result)
|
||||
else:
|
||||
result += pack('!B', (format << 4) | length)
|
||||
return result
|
||||
|
||||
def timedelta_total_seconds(td):
|
||||
# Shim for Python 2.6 compatibility, which doesn't have total_seconds.
|
||||
# Make one argument a float to ensure the right calculation.
|
||||
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10.0**6) / 10.0**6
|
||||
|
||||
if setReferencePosition:
|
||||
self.referencePositions[obj] = len(output)
|
||||
|
||||
if obj is None:
|
||||
output += pack('!B', 0b00000000)
|
||||
elif isinstance(obj, BoolWrapper):
|
||||
if obj.value is False:
|
||||
output += pack('!B', 0b00001000)
|
||||
else:
|
||||
output += pack('!B', 0b00001001)
|
||||
elif isinstance(obj, Uid):
|
||||
size = self.intSize(obj.integer)
|
||||
output += pack('!B', (0b1000 << 4) | size - 1)
|
||||
output += self.binaryInt(obj.integer)
|
||||
elif isinstance(obj, (int, long)):
|
||||
byteSize = self.intSize(obj)
|
||||
root = math.log(byteSize, 2)
|
||||
output += pack('!B', (0b0001 << 4) | int(root))
|
||||
output += self.binaryInt(obj, as_number=True)
|
||||
elif isinstance(obj, FloatWrapper):
|
||||
# just use doubles
|
||||
output += pack('!B', (0b0010 << 4) | 3)
|
||||
output += self.binaryReal(obj)
|
||||
elif isinstance(obj, datetime.datetime):
|
||||
try:
|
||||
timestamp = (obj - apple_reference_date).total_seconds()
|
||||
except AttributeError:
|
||||
timestamp = timedelta_total_seconds(obj - apple_reference_date)
|
||||
output += pack('!B', 0b00110011)
|
||||
output += pack('!d', float(timestamp))
|
||||
elif isinstance(obj, Data):
|
||||
output += proc_variable_length(0b0100, len(obj))
|
||||
output += obj
|
||||
elif isinstance(obj, StringWrapper):
|
||||
output += proc_variable_length(obj.encodingMarker, len(obj))
|
||||
output += obj.encodedValue
|
||||
elif isinstance(obj, bytes):
|
||||
output += proc_variable_length(0b0101, len(obj))
|
||||
output += obj
|
||||
elif isinstance(obj, HashableWrapper):
|
||||
obj = obj.value
|
||||
if isinstance(obj, (set, list, tuple)):
|
||||
if isinstance(obj, set):
|
||||
output += proc_variable_length(0b1100, len(obj))
|
||||
else:
|
||||
output += proc_variable_length(0b1010, len(obj))
|
||||
|
||||
objectsToWrite = []
|
||||
for objRef in sorted(obj) if isinstance(obj, set) else obj:
|
||||
(isNew, output) = self.writeObjectReference(objRef, output)
|
||||
if isNew:
|
||||
objectsToWrite.append(objRef)
|
||||
for objRef in objectsToWrite:
|
||||
output = self.writeObject(objRef, output, setReferencePosition=True)
|
||||
elif isinstance(obj, dict):
|
||||
output += proc_variable_length(0b1101, len(obj))
|
||||
keys = []
|
||||
values = []
|
||||
objectsToWrite = []
|
||||
for key, value in sorted(iteritems(obj)):
|
||||
keys.append(key)
|
||||
values.append(value)
|
||||
for key in keys:
|
||||
(isNew, output) = self.writeObjectReference(key, output)
|
||||
if isNew:
|
||||
objectsToWrite.append(key)
|
||||
for value in values:
|
||||
(isNew, output) = self.writeObjectReference(value, output)
|
||||
if isNew:
|
||||
objectsToWrite.append(value)
|
||||
for objRef in objectsToWrite:
|
||||
output = self.writeObject(objRef, output, setReferencePosition=True)
|
||||
return output
|
||||
|
||||
def writeOffsetTable(self, output):
    """Append the offset table: one fixed-width integer per written
    object, giving that object's byte position in the plist output.

    Entries are emitted in reference-number order (the order objects
    were assigned references during writing).  Raises
    InvalidPlistException if a referenced object has no recorded
    position.  (The original also accumulated an ``all_positions``
    list that was never used; removed.)
    """
    writtenReferences = list(self.writtenReferences.items())
    writtenReferences.sort(key=lambda x: x[1])
    for obj, order in writtenReferences:
        # Porting note: Elsewhere we deliberately replace empty unicode
        # strings with empty binary strings, but the empty unicode string
        # goes into writtenReferences. This isn't an issue in Py2
        # because u'' and b'' have the same hash; but it is in
        # Py3, where they don't.
        if bytes != str and obj == unicodeEmpty:
            obj = b''
        position = self.referencePositions.get(obj)
        if position is None:
            raise InvalidPlistException("Error while writing offsets table. Object not found. %s" % obj)
        output += self.binaryInt(position, self.trailer.offsetSize)
    return output
|
||||
|
||||
def binaryReal(self, obj):
    """Encode a real wrapper as a big-endian IEEE-754 double.

    All reals are serialized at double precision regardless of the
    magnitude of ``obj.value``.
    """
    return pack('>d', obj.value)
|
||||
|
||||
def binaryInt(self, obj, byteSize=None, as_number=False):
    """Encode an integer as a big-endian binary value.

    byteSize  -- width in bytes; computed with intSize() when omitted.
    as_number -- for 8-byte widths, pack signed ('q') rather than
                 unsigned ('Q').

    Widths other than 1/2/4/8 that are at most 16 are written as a
    16-byte value (high quad zero); anything wider raises
    InvalidPlistException, as does a 16-byte value that cannot be
    packed.
    """
    if byteSize is None:
        byteSize = self.intSize(obj)
    if byteSize == 1:
        return pack('>B', obj)
    if byteSize == 2:
        return pack('>H', obj)
    if byteSize == 4:
        return pack('>L', obj)
    if byteSize == 8:
        return pack('>q' if as_number else '>Q', obj)
    if byteSize <= 16:
        try:
            return pack('>Q', 0) + pack('>Q', obj)
        except struct_error as e:
            raise InvalidPlistException("Unable to pack integer %d: %s" % (obj, e))
    raise InvalidPlistException("Core Foundation can't handle integers with size greater than 16 bytes.")
|
||||
|
||||
def intSize(self, obj):
    """Returns the number of bytes necessary to store the given integer.

    Negative values are always stored as signed 8-byte integers.
    Non-negative values use the smallest of 1, 2, 4 or 8 bytes; values
    above the signed 64-bit maximum (but still unsigned-64-bit
    representable) take 16 bytes.  Anything larger raises
    InvalidPlistException.
    """
    if obj < 0:
        # SIGNED: negative integers always occupy 8 bytes.
        return 8
    # UNSIGNED thresholds, smallest width first; the 8-byte limit is the
    # signed maximum (0x7FFFFFFFFFFFFFFF), larger unsigned values spill
    # into the 16-byte representation.
    for size, largest in ((1, 0xFF),
                          (2, 0xFFFF),
                          (4, 0xFFFFFFFF),
                          (8, 0x7FFFFFFFFFFFFFFF),
                          (16, 0xFFFFFFFFFFFFFFFF)):
        if obj <= largest:
            return size
    raise InvalidPlistException("Core Foundation can't handle integers with size greater than 8 bytes.")
|
||||
|
||||
def realSize(self, obj):
    """All reals are serialized as 8-byte doubles; *obj* is ignored."""
    return 8
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/biplist/__init__.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/biplist/__init__.pyc
generated
vendored
Normal file
Binary file not shown.
143
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/badge.py
generated
vendored
Normal file
143
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/badge.py
generated
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from Quartz import *
|
||||
import math
|
||||
|
||||
# Stock macOS icon for removable media (shipped inside IOStorageFamily.kext);
# used below as the backdrop the badge is composited onto.
_REMOVABLE_DISK_PATH = '/System/Library/Extensions/IOStorageFamily.kext/Contents/Resources/Removable.icns'
|
||||
|
||||
def badge_disk_icon(badge_file, output_file):
    """Composite *badge_file* over the system removable-disk icon and
    write the result, with one representation per backdrop size, to
    *output_file* as a 'com.apple.icns' image.

    Relies on PyObjC's Quartz/CoreImage bindings (star-imported at the
    top of this module), so it only runs on macOS.
    """
    # Load the Removable disk icon
    url = CFURLCreateWithFileSystemPath(None, _REMOVABLE_DISK_PATH,
                                        kCFURLPOSIXPathStyle, False)
    backdrop = CGImageSourceCreateWithURL(url, None)
    backdropCount = CGImageSourceGetCount(backdrop)

    # Load the badge
    url = CFURLCreateWithFileSystemPath(None, badge_file,
                                        kCFURLPOSIXPathStyle, False)
    badge = CGImageSourceCreateWithURL(url, None)
    assert badge is not None, 'Unable to process image file: %s' % badge_file
    badgeCount = CGImageSourceGetCount(badge)

    # Set up a destination for our target
    url = CFURLCreateWithFileSystemPath(None, output_file,
                                        kCFURLPOSIXPathStyle, False)
    target = CGImageDestinationCreateWithURL(url, 'com.apple.icns',
                                             backdropCount, None)

    # Get the RGB colorspace
    rgbColorSpace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB)

    # Scale of the badge relative to the backdrop (1.0 = full width)
    scale = 1.0

    # Perspective transform: unit-square corner targets (fractions of
    # the backdrop width) that tilt the badge onto the disk face.
    corners = ((0.2, 0.95), (0.8, 0.95), (0.85, 0.35), (0.15, 0.35))

    # Translation of the badge's centre, as fractions of width/height.
    position = (0.5, 0.5)

    for n in range(backdropCount):
        props = CGImageSourceCopyPropertiesAtIndex(backdrop, n, None)
        width = props['PixelWidth']
        height = props['PixelHeight']
        dpi = props['DPIWidth']
        depth = props['Depth']

        # Choose the best sized badge image for this backdrop
        # representation: prefer the largest badge no wider than the
        # backdrop, breaking ties on DPI and bit depth.
        bestWidth = None
        bestHeight = None   # NOTE(review): tracked but never read below
        bestBadge = None
        bestDepth = None
        bestDPI = None      # NOTE(review): tracked but never read below
        for m in range(badgeCount):
            badgeProps = CGImageSourceCopyPropertiesAtIndex(badge, m, None)
            badgeWidth = badgeProps['PixelWidth']
            badgeHeight = badgeProps['PixelHeight']
            badgeDPI = badgeProps['DPIWidth']
            badgeDepth = badgeProps['Depth']

            if bestBadge is None or (badgeWidth <= width
                                     and (bestWidth > width
                                          or badgeWidth > bestWidth
                                          or (badgeWidth == bestWidth
                                              and badgeDPI == dpi
                                              and badgeDepth <= depth
                                              and (bestDepth is None
                                                   or badgeDepth > bestDepth)))):
                bestBadge = m
                bestWidth = badgeWidth
                bestHeight = badgeHeight
                bestDPI = badgeDPI
                bestDepth = badgeDepth

        badgeImage = CGImageSourceCreateImageAtIndex(badge, bestBadge, None)
        badgeCI = CIImage.imageWithCGImage_(badgeImage)

        backgroundImage = CGImageSourceCreateImageAtIndex(backdrop, n, None)
        backgroundCI = CIImage.imageWithCGImage_(backgroundImage)

        # CoreImage filter chain: scale -> (perspective) -> translate ->
        # composite over the backdrop.
        compositor = CIFilter.filterWithName_('CISourceOverCompositing')
        lanczos = CIFilter.filterWithName_('CILanczosScaleTransform')
        perspective = CIFilter.filterWithName_('CIPerspectiveTransform')
        transform = CIFilter.filterWithName_('CIAffineTransform')

        # Resample the badge to the backdrop's width (times scale).
        lanczos.setValue_forKey_(badgeCI, kCIInputImageKey)
        lanczos.setValue_forKey_(scale * float(width)/bestWidth, kCIInputScaleKey)
        lanczos.setValue_forKey_(1.0, kCIInputAspectRatioKey)

        # Corner targets in pixels for the perspective warp.
        topLeft = (width * scale * corners[0][0],
                   width * scale * corners[0][1])
        topRight = (width * scale * corners[1][0],
                    width * scale * corners[1][1])
        bottomRight = (width * scale * corners[2][0],
                       width * scale * corners[2][1])
        bottomLeft = (width * scale * corners[3][0],
                      width * scale * corners[3][1])

        out = lanczos.valueForKey_(kCIOutputImageKey)
        # Skip the perspective warp for tiny (< 16px) representations,
        # where it would just smear the badge.
        if width >= 16:
            perspective.setValue_forKey_(out, kCIInputImageKey)
            perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*topLeft),
                                         'inputTopLeft')
            perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*topRight),
                                         'inputTopRight')
            perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*bottomRight),
                                         'inputBottomRight')
            perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*bottomLeft),
                                         'inputBottomLeft')
            out = perspective.valueForKey_(kCIOutputImageKey)

        # Centre the (scaled, warped) badge at *position*.
        tfm = NSAffineTransform.transform()
        tfm.translateXBy_yBy_(math.floor((position[0] - 0.5 * scale) * width),
                              math.floor((position[1] - 0.5 * scale) * height))

        transform.setValue_forKey_(out, kCIInputImageKey)
        transform.setValue_forKey_(tfm, 'inputTransform')
        out = transform.valueForKey_(kCIOutputImageKey)

        compositor.setValue_forKey_(out, kCIInputImageKey)
        compositor.setValue_forKey_(backgroundCI, kCIInputBackgroundImageKey)

        result = compositor.valueForKey_(kCIOutputImageKey)

        # Render the composited CIImage into a bitmap context so it can
        # be handed to the image destination.
        cgContext = CGBitmapContextCreate(None,
                                          width,
                                          height,
                                          8,
                                          0,
                                          rgbColorSpace,
                                          kCGImageAlphaPremultipliedLast)
        context = CIContext.contextWithCGContext_options_(cgContext, None)

        context.drawImage_inRect_fromRect_(result,
                                           ((0, 0), (width, height)),
                                           ((0, 0), (width, height)))

        image = CGBitmapContextCreateImage(cgContext)

        # Reuse the backdrop's properties (DPI etc.) for the new image.
        CGImageDestinationAddImage(target, image, props)

    CGImageDestinationFinalize(target)
|
||||
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/badge.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/badge.pyc
generated
vendored
Normal file
Binary file not shown.
494
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/colors.py
generated
vendored
Normal file
494
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/colors.py
generated
vendored
Normal file
@@ -0,0 +1,494 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import re
|
||||
import math
|
||||
|
||||
class Color (object):
    """Abstract base for the color models below; concrete subclasses
    convert themselves to RGB via to_rgb()."""
    def to_rgb(self):
        # Deliberately a plain Exception: subclasses must override.
        raise Exception('Must implement to_rgb() in subclasses')
|
||||
|
||||
class RGB (Color):
    """A color already expressed as red/green/blue components
    (floats in the range 0..1)."""
    def __init__(self, r, g, b):
        self.r = r  # red component, 0..1
        self.g = g  # green component, 0..1
        self.b = b  # blue component, 0..1

    def to_rgb(self):
        """Identity conversion -- the color is already RGB."""
        return self
|
||||
|
||||
class HSL (Color):
    """Hue (degrees) plus saturation and lightness (both 0..1)."""

    def __init__(self, h, s, l):
        self.h = h  # hue in degrees
        self.s = s  # saturation, 0..1
        self.l = l  # lightness, 0..1

    @staticmethod
    def _hue_to_rgb(t1, t2, hue):
        """Map one hue sector to a channel value between t1 and t2.

        *hue* is in sixths of the circle (one unit == 60 degrees) and
        is first normalised into [0, 6).
        """
        if hue < 0:
            hue += 6
        elif hue >= 6:
            hue -= 6
        if hue < 1:
            return (t2 - t1) * hue + t1
        if hue < 3:
            return t2
        if hue < 4:
            return (t2 - t1) * (4 - hue) + t1
        return t1

    def to_rgb(self):
        """Convert to RGB using the standard HSL algorithm."""
        hue = self.h / 60.0
        if self.l <= 0.5:
            t2 = self.l * (self.s + 1)
        else:
            t2 = self.l + self.s - (self.l * self.s)
        t1 = self.l * 2 - t2
        return RGB(self._hue_to_rgb(t1, t2, hue + 2),
                   self._hue_to_rgb(t1, t2, hue),
                   self._hue_to_rgb(t1, t2, hue - 2))
|
||||
|
||||
class HWB (Color):
    """Hue (degrees) plus whiteness and blackness fractions (0..1)."""

    def __init__(self, h, w, b):
        self.h = h  # hue in degrees
        self.w = w  # whiteness, 0..1
        self.b = b  # blackness, 0..1

    @staticmethod
    def _hue_to_rgb(hue):
        """Pure-hue channel value for *hue* in sixths of the circle.

        Normalises into [0, 6) first; one unit == 60 degrees.
        """
        if hue < 0:
            hue += 6
        elif hue >= 6:
            hue -= 6
        if hue < 1:
            return hue
        if hue < 3:
            return 1
        if hue < 4:
            return (4 - hue)
        return 0

    def to_rgb(self):
        """Convert to RGB: blend the pure hue towards white/black."""
        hue = self.h / 60.0
        t1 = 1 - self.w - self.b
        return RGB(self._hue_to_rgb(hue + 2) * t1 + self.w,
                   self._hue_to_rgb(hue) * t1 + self.w,
                   self._hue_to_rgb(hue - 2) * t1 + self.w)
|
||||
|
||||
class CMYK (Color):
    """Cyan/magenta/yellow/black components, each 0..1."""
    def __init__(self, c, m, y, k):
        self.c = c  # cyan, 0..1
        self.m = m  # magenta, 0..1
        self.y = y  # yellow, 0..1
        self.k = k  # black (key), 0..1

    def to_rgb(self):
        """Naive (uncalibrated) CMYK -> RGB conversion, clamped at 1."""
        r = 1.0 - min(1.0, self.c + self.k)
        g = 1.0 - min(1.0, self.m + self.k)
        b = 1.0 - min(1.0, self.y + self.k)
        return RGB(r, g, b)
|
||||
|
||||
class Gray (Color):
    """A grayscale color; *g* is the gray level, 0..1."""

    def __init__(self, g):
        self.g = g  # gray level, 0..1

    def to_rgb(self):
        """Gray maps to an RGB triple with all components equal.

        Fixed: the original returned ``RGB(g, g, g)`` with a bare,
        undefined name ``g`` (NameError at call time); it must read
        the instance attribute.
        """
        g = self.g
        return RGB(g, g, g)
|
||||
|
||||
# X11/CSS named colors: lowercase name -> (r, g, b) with 8-bit components.
# Used by ColorParser to resolve bare color names (scaled to 0..1 there).
_x11_colors = {
    'aliceblue': (240, 248, 255),
    'antiquewhite': (250, 235, 215),
    'aqua': (0, 255, 255),
    'aquamarine': (127, 255, 212),
    'azure': (240, 255, 255),
    'beige': (245, 245, 220),
    'bisque': (255, 228, 196),
    'black': (0, 0, 0),
    'blanchedalmond': (255, 235, 205),
    'blue': (0, 0, 255),
    'blueviolet': (138, 43, 226),
    'brown': (165, 42, 42),
    'burlywood': (222, 184, 135),
    'cadetblue': (95, 158, 160),
    'chartreuse': (127, 255, 0),
    'chocolate': (210, 105, 30),
    'coral': (255, 127, 80),
    'cornflowerblue': (100, 149, 237),
    'cornsilk': (255, 248, 220),
    'crimson': (220, 20, 60),
    'cyan': (0, 255, 255),
    'darkblue': (0, 0, 139),
    'darkcyan': (0, 139, 139),
    'darkgoldenrod': (184, 134, 11),
    'darkgray': (169, 169, 169),
    'darkgreen': (0, 100, 0),
    'darkgrey': (169, 169, 169),
    'darkkhaki': (189, 183, 107),
    'darkmagenta': (139, 0, 139),
    'darkolivegreen': (85, 107, 47),
    'darkorange': (255, 140, 0),
    'darkorchid': (153, 50, 204),
    'darkred': (139, 0, 0),
    'darksalmon': (233, 150, 122),
    'darkseagreen': (143, 188, 143),
    'darkslateblue': (72, 61, 139),
    'darkslategray': (47, 79, 79),
    'darkslategrey': (47, 79, 79),
    'darkturquoise': (0, 206, 209),
    'darkviolet': (148, 0, 211),
    'deeppink': (255, 20, 147),
    'deepskyblue': (0, 191, 255),
    'dimgray': (105, 105, 105),
    'dimgrey': (105, 105, 105),
    'dodgerblue': (30, 144, 255),
    'firebrick': (178, 34, 34),
    'floralwhite': (255, 250, 240),
    'forestgreen': (34, 139, 34),
    'fuchsia': (255, 0, 255),
    'gainsboro': (220, 220, 220),
    'ghostwhite': (248, 248, 255),
    'gold': (255, 215, 0),
    'goldenrod': (218, 165, 32),
    'gray': (128, 128, 128),
    'grey': (128, 128, 128),
    'green': (0, 128, 0),
    'greenyellow': (173, 255, 47),
    'honeydew': (240, 255, 240),
    'hotpink': (255, 105, 180),
    'indianred': (205, 92, 92),
    'indigo': (75, 0, 130),
    'ivory': (255, 255, 240),
    'khaki': (240, 230, 140),
    'lavender': (230, 230, 250),
    'lavenderblush': (255, 240, 245),
    'lawngreen': (124, 252, 0),
    'lemonchiffon': (255, 250, 205),
    'lightblue': (173, 216, 230),
    'lightcoral': (240, 128, 128),
    'lightcyan': (224, 255, 255),
    'lightgoldenrodyellow': (250, 250, 210),
    'lightgray': (211, 211, 211),
    'lightgreen': (144, 238, 144),
    'lightgrey': (211, 211, 211),
    'lightpink': (255, 182, 193),
    'lightsalmon': (255, 160, 122),
    'lightseagreen': (32, 178, 170),
    'lightskyblue': (135, 206, 250),
    'lightslategray': (119, 136, 153),
    'lightslategrey': (119, 136, 153),
    'lightsteelblue': (176, 196, 222),
    'lightyellow': (255, 255, 224),
    'lime': (0, 255, 0),
    'limegreen': (50, 205, 50),
    'linen': (250, 240, 230),
    'magenta': (255, 0, 255),
    'maroon': (128, 0, 0),
    'mediumaquamarine': (102, 205, 170),
    'mediumblue': (0, 0, 205),
    'mediumorchid': (186, 85, 211),
    'mediumpurple': (147, 112, 219),
    'mediumseagreen': (60, 179, 113),
    'mediumslateblue': (123, 104, 238),
    'mediumspringgreen': (0, 250, 154),
    'mediumturquoise': (72, 209, 204),
    'mediumvioletred': (199, 21, 133),
    'midnightblue': (25, 25, 112),
    'mintcream': (245, 255, 250),
    'mistyrose': (255, 228, 225),
    'moccasin': (255, 228, 181),
    'navajowhite': (255, 222, 173),
    'navy': (0, 0, 128),
    'oldlace': (253, 245, 230),
    'olive': (128, 128, 0),
    'olivedrab': (107, 142, 35),
    'orange': (255, 165, 0),
    'orangered': (255, 69, 0),
    'orchid': (218, 112, 214),
    'palegoldenrod': (238, 232, 170),
    'palegreen': (152, 251, 152),
    'paleturquoise': (175, 238, 238),
    'palevioletred': (219, 112, 147),
    'papayawhip': (255, 239, 213),
    'peachpuff': (255, 218, 185),
    'peru': (205, 133, 63),
    'pink': (255, 192, 203),
    'plum': (221, 160, 221),
    'powderblue': (176, 224, 230),
    'purple': (128, 0, 128),
    'red': (255, 0, 0),
    'rosybrown': (188, 143, 143),
    'royalblue': (65, 105, 225),
    'saddlebrown': (139, 69, 19),
    'salmon': (250, 128, 114),
    'sandybrown': (244, 164, 96),
    'seagreen': (46, 139, 87),
    'seashell': (255, 245, 238),
    'sienna': (160, 82, 45),
    'silver': (192, 192, 192),
    'skyblue': (135, 206, 235),
    'slateblue': (106, 90, 205),
    'slategray': (112, 128, 144),
    'slategrey': (112, 128, 144),
    'snow': (255, 250, 250),
    'springgreen': (0, 255, 127),
    'steelblue': (70, 130, 180),
    'tan': (210, 180, 140),
    'teal': (0, 128, 128),
    'thistle': (216, 191, 216),
    'tomato': (255, 99, 71),
    'turquoise': (64, 224, 208),
    'violet': (238, 130, 238),
    'wheat': (245, 222, 179),
    'white': (255, 255, 255),
    'whitesmoke': (245, 245, 245),
    'yellow': (255, 255, 0),
    'yellowgreen': (154, 205, 50)
    }
|
||||
|
||||
_ws_re = re.compile('\s+')
|
||||
_token_re = re.compile('[A-Za-z_][A-Za-z0-9_]*')
|
||||
_hex_re = re.compile('#([0-9a-f]{3}(?:[0-9a-f]{3})?)$')
|
||||
_number_re = re.compile('[0-9]*(\.[0-9]*)')
|
||||
|
||||
class ColorParser (object):
    """Recursive-descent parser for a CSS-like color syntax.

    Accepts hex colors (#rgb / #rrggbb), the function-style forms
    rgb(), hsl(), hwb(), cmyk() and gray()/grey(), and X11 color
    names.  Component values may be plain numbers or percentages
    (scaled to 0..1); angles take a deg/rad/grad/gon unit.

    Fixes over the original:
      * parseValue wrote ``self.pos += 1`` (no such attribute), so any
        percentage value raised AttributeError; now ``self._pos``.
      * parseValue indexed past the end of the string when a number
        ended the input; now bounds-checked.
      * parseGray's closing-paren error message was missing its final
        quote character.
    The repetitive rgb/hsl/hwb/cmyk/gray bodies are factored into the
    private helper _parseArgs; all public behavior (methods, messages)
    is unchanged apart from the fixes above.
    """

    def __init__(self, s):
        self._string = s    # the text being parsed
        self._pos = 0       # current parse position within _string

    def skipws(self):
        """Advance past any whitespace at the current position."""
        m = _ws_re.match(self._string, self._pos)
        if m:
            self._pos = m.end(0)

    def expect(self, s, context=''):
        """Consume the literal *s*, or raise ValueError citing *context*."""
        if len(self._string) - self._pos < len(s) \
                or self._string[self._pos:self._pos + len(s)] != s:
            raise ValueError('bad color "%s" - expected "%s"%s'
                             % (self._string, s, context))
        self._pos += len(s)

    def expectEnd(self):
        """Raise ValueError unless the whole string has been consumed."""
        if self._pos != len(self._string):
            raise ValueError('junk at end of color "%s"' % self._string)

    def getToken(self):
        """Return the identifier at the current position, or None."""
        m = _token_re.match(self._string, self._pos)
        if m:
            token = m.group(0)
            self._pos = m.end(0)
            return token
        return None

    def parseNumber(self, context=''):
        """Parse and return a decimal number as a float."""
        m = _number_re.match(self._string, self._pos)
        if m:
            self._pos = m.end(0)
            return float(m.group(0))
        raise ValueError('bad color "%s" - expected a number%s'
                         % (self._string, context))

    def parseColor(self):
        """Parse the whole string and return a Color instance."""
        self.skipws()

        token = self.getToken()
        if token:
            # Function-style forms first, then bare X11 names.
            if token == 'rgb':
                return self.parseRGB()
            elif token == 'hsl':
                return self.parseHSL()
            elif token == 'hwb':
                return self.parseHWB()
            elif token == 'cmyk':
                return self.parseCMYK()
            elif token == 'gray' or token == 'grey':
                return self.parseGray()

            try:
                r, g, b = _x11_colors[token]
            except KeyError:
                raise ValueError('unknown color name "%s"' % token)

            self.expectEnd()

            return RGB(r / 255.0, g / 255.0, b / 255.0)

        # Hex form: #rgb or #rrggbb (lowercase digits only).
        m = _hex_re.match(self._string, self._pos)
        if m:
            hrgb = m.group(1)

            if len(hrgb) == 3:
                # Short form: each digit is doubled (f -> ff).
                r = int('0x' + 2 * hrgb[0], 16)
                g = int('0x' + 2 * hrgb[1], 16)
                b = int('0x' + 2 * hrgb[2], 16)
            else:
                r = int('0x' + hrgb[0:2], 16)
                g = int('0x' + hrgb[2:4], 16)
                b = int('0x' + hrgb[4:6], 16)

            self._pos = m.end(0)
            self.skipws()

            self.expectEnd()

            return RGB(r / 255.0, g / 255.0, b / 255.0)

        raise ValueError('bad color syntax "%s"' % self._string)

    def _parseArgs(self, name, parsers):
        """Parse '(' arg (',' arg)* ')' for the function-style form *name*.

        *parsers* is a sequence of bound, no-argument parse methods,
        one per expected component.  Verifies the string ends after the
        closing parenthesis and returns the parsed values as a list.
        """
        self.expect('(', 'after "%s"' % name)
        self.skipws()

        values = []
        for i, parse in enumerate(parsers):
            if i:
                self.skipws()
                self.expect(',', 'in "%s"' % name)
                self.skipws()
            values.append(parse())

        self.skipws()
        self.expect(')', 'at end of "%s"' % name)

        self.skipws()
        self.expectEnd()

        return values

    def parseRGB(self):
        """Parse rgb(r, g, b)."""
        r, g, b = self._parseArgs('rgb', (self.parseValue,) * 3)
        return RGB(r, g, b)

    def parseHSL(self):
        """Parse hsl(hue, saturation, lightness)."""
        h, s, l = self._parseArgs(
            'hsl', (self.parseAngle, self.parseValue, self.parseValue))
        return HSL(h, s, l)

    def parseHWB(self):
        """Parse hwb(hue, whiteness, blackness)."""
        h, w, b = self._parseArgs(
            'hwb', (self.parseAngle, self.parseValue, self.parseValue))
        return HWB(h, w, b)

    def parseCMYK(self):
        """Parse cmyk(c, m, y, k)."""
        c, m, y, k = self._parseArgs('cmyk', (self.parseValue,) * 4)
        return CMYK(c, m, y, k)

    def parseGray(self):
        """Parse gray(g) / grey(g)."""
        (g,) = self._parseArgs('gray', (self.parseValue,))
        return Gray(g)

    def parseValue(self):
        """Parse a number or percentage; percentages are scaled to 0..1."""
        n = self.parseNumber()
        self.skipws()
        if self._pos < len(self._string) and self._string[self._pos] == '%':
            n = n / 100.0
            self._pos += 1
        return n

    def parseAngle(self):
        """Parse an angle with a deg/rad/grad/gon unit; returns degrees."""
        n = self.parseNumber()
        self.skipws()
        tok = self.getToken()
        if tok == 'rad':
            n = n * 180.0 / math.pi
        elif tok == 'grad' or tok == 'gon':
            n = n * 0.9
        elif tok != 'deg':
            raise ValueError('bad angle unit "%s"' % tok)
        return n
|
||||
|
||||
# Quick syntactic test: a string that might be a color starts with '#',
# one of the function-style prefixes, or a known X11 color name.
# Fixed: raw string (the original's non-raw '\s' is a deprecated escape
# sequence in Python 3).
_color_re = re.compile(r'\s*(#|rgb|hsl|hwb|cmyk|gray|grey|%s)'
                       % '|'.join(_x11_colors.keys()))

def isAColor(s):
    """Return a truthy match object if *s* looks like a color string."""
    return _color_re.match(s)
|
||||
|
||||
def parseColor(s):
    """Parse the color string *s* and return a Color instance.

    Raises ValueError when *s* is not a recognised color.
    """
    parser = ColorParser(s)
    return parser.parseColor()
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/colors.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/colors.pyc
generated
vendored
Normal file
Binary file not shown.
282
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/core.py
generated
vendored
Normal file
282
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/core.py
generated
vendored
Normal file
@@ -0,0 +1,282 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
reload(sys) # Reload is a hack
|
||||
sys.setdefaultencoding('UTF8')
|
||||
|
||||
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), "..")))
|
||||
|
||||
try:
|
||||
{}.iteritems
|
||||
iteritems = lambda x: x.iteritems()
|
||||
iterkeys = lambda x: x.iterkeys()
|
||||
except AttributeError:
|
||||
iteritems = lambda x: x.items()
|
||||
iterkeys = lambda x: x.keys()
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = str
|
||||
|
||||
import biplist
|
||||
from mac_alias import *
|
||||
from ds_store import *
|
||||
|
||||
from colors import parseColor
|
||||
|
||||
try:
|
||||
from badge import badge
|
||||
except ImportError:
|
||||
badge = None
|
||||
|
||||
class DMGError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def build_dmg():
    """Write the Finder .DS_Store for the DMG volume being built.

    All inputs arrive through os.environ (volumePath, iconSize,
    iconTextSize, windowX/Y/Width/Height, backgroundColor or
    backgroundFile, iconLocations); there is no return value -- the
    effect is the .DS_Store file written into the volume.

    Fixes over the original: the hidden-columns loop indexed
    ``columns[column]`` with the stale variable from the previous loop
    (so every hidden column overwrote one entry) -- it now uses the
    loop's own key; the icvl fallback is bytes like the table values;
    the no-op ``try: ... except: raise`` wrapper was removed.
    """
    # Defaults for the Finder view options; several are overridden from
    # the environment below.
    options = {
        'icon': None,
        'badge_icon': None,
        'sidebar_width': 180,
        'arrange_by': None,
        'grid_offset': (0, 0),
        'grid_spacing': 100.0,
        'scroll_position': (0.0, 0.0),
        'show_icon_preview': False,
        'text_size': os.environ['iconTextSize'],
        'icon_size': os.environ['iconSize'],
        'include_icon_view_settings': 'auto',
        'include_list_view_settings': 'auto',
        'list_icon_size': 16.0,
        'list_text_size': 12.0,
        'list_scroll_position': (0, 0),
        'list_sort_by': 'name',
        'list_columns': ('name', 'date-modified', 'size', 'kind', 'date-added'),
        'list_column_widths': {
            'name': 300,
            'date-modified': 181,
            'date-created': 181,
            'date-added': 181,
            'date-last-opened': 181,
            'size': 97,
            'kind': 115,
            'label': 100,
            'version': 75,
            'comments': 300,
            },
        'list_column_sort_directions': {
            'name': 'ascending',
            'date-modified': 'descending',
            'date-created': 'descending',
            'date-added': 'descending',
            'date-last-opened': 'descending',
            'size': 'descending',
            'kind': 'ascending',
            'label': 'ascending',
            'version': 'ascending',
            'comments': 'ascending',
            }
        }

    # Set up the finder data: browser window settings ('bwsp').
    bwsp = {
        'ShowStatusBar': False,
        'ContainerShowSidebar': False,
        'PreviewPaneVisibility': False,
        'SidebarWidth': options['sidebar_width'],
        'ShowTabView': False,
        'ShowToolbar': False,
        'ShowPathbar': False,
        'ShowSidebar': False
        }

    window_x = os.environ.get('windowX')
    if window_x:
        window_y = os.environ['windowY']
        bwsp['WindowBounds'] = '{{%s, %s}, {%s, %s}}' % (window_x,
                                                         window_y,
                                                         os.environ['windowWidth'],
                                                         os.environ['windowHeight'])

    # Maps our option names to Finder's arrangeBy identifiers.
    arrange_options = {
        'name': 'name',
        'date-modified': 'dateModified',
        'date-created': 'dateCreated',
        'date-added': 'dateAdded',
        'date-last-opened': 'dateLastOpened',
        'size': 'size',
        'kind': 'kind',
        'label': 'label',
        }

    # Icon-view properties ('icvp'); background defaults to plain white.
    icvp = {
        'viewOptionsVersion': 1,
        'backgroundType': 0,
        'backgroundColorRed': 1.0,
        'backgroundColorGreen': 1.0,
        'backgroundColorBlue': 1.0,
        'gridOffsetX': float(options['grid_offset'][0]),
        'gridOffsetY': float(options['grid_offset'][1]),
        'gridSpacing': float(options['grid_spacing']),
        'arrangeBy': str(arrange_options.get(options['arrange_by'], 'none')),
        'showIconPreview': options['show_icon_preview'] == True,
        'showItemInfo': False,
        'labelOnBottom': True,
        'textSize': float(options['text_size']),
        'iconSize': float(options['icon_size']),
        'scrollPositionX': float(options['scroll_position'][0]),
        'scrollPositionY': float(options['scroll_position'][1])
        }

    # Option name -> Finder list-view column identifier.
    columns = {
        'name': 'name',
        'date-modified': 'dateModified',
        'date-created': 'dateCreated',
        'date-added': 'dateAdded',
        'date-last-opened': 'dateLastOpened',
        'size': 'size',
        'kind': 'kind',
        'label': 'label',
        'version': 'version',
        'comments': 'comments'
        }

    default_widths = {
        'name': 300,
        'date-modified': 181,
        'date-created': 181,
        'date-added': 181,
        'date-last-opened': 181,
        'size': 97,
        'kind': 115,
        'label': 100,
        'version': 75,
        'comments': 300,
        }

    default_sort_directions = {
        'name': 'ascending',
        'date-modified': 'descending',
        'date-created': 'descending',
        'date-added': 'descending',
        'date-last-opened': 'descending',
        'size': 'descending',
        'kind': 'ascending',
        'label': 'ascending',
        'version': 'ascending',
        'comments': 'ascending',
        }

    # List-view properties ('lsvp').
    lsvp = {
        'viewOptionsVersion': 1,
        'sortColumn': columns.get(options['list_sort_by'], 'name'),
        'textSize': float(options['list_text_size']),
        'iconSize': float(options['list_icon_size']),
        'showIconPreview': options['show_icon_preview'],
        'scrollPositionX': options['list_scroll_position'][0],
        'scrollPositionY': options['list_scroll_position'][1],
        'useRelativeDates': True,
        'calculateAllSizes': False,
        }

    lsvp['columns'] = {}
    cndx = {}

    # First the columns the user asked for, in order, marked visible.
    for n, column in enumerate(options['list_columns']):
        cndx[column] = n
        width = options['list_column_widths'].get(column, default_widths[column])
        asc = 'ascending' == options['list_column_sort_directions'].get(column, default_sort_directions[column])

        lsvp['columns'][columns[column]] = {
            'index': n,
            'width': width,
            'identifier': columns[column],
            'visible': True,
            'ascending': asc
            }

    # Then any remaining known columns, appended hidden with defaults.
    n = len(options['list_columns'])
    for k in iterkeys(columns):
        if cndx.get(k, None) is None:
            cndx[k] = n
            width = default_widths[k]
            asc = 'ascending' == default_sort_directions[k]

            # Fixed: the original used ``columns[column]`` here (the stale
            # variable from the loop above), clobbering a single entry.
            lsvp['columns'][columns[k]] = {
                'index': n,
                'width': width,
                'identifier': columns[k],
                'visible': False,
                'ascending': asc
                }

            n += 1

    default_view = 'icon-view'
    views = {
        'icon-view': b'icnv',
        'column-view': b'clmv',
        'list-view': b'Nlsv',
        'coverflow': b'Flwv'
        }

    # Fixed: fallback is bytes for consistency with the table values
    # (unreachable for the hard-coded default_view, but type-correct).
    icvl = (b'type', views.get(default_view, b'icnv'))

    include_icon_view_settings = default_view == 'icon-view' \
        or options['include_icon_view_settings'] not in \
        ('auto', 'no', 0, False, None)
    include_list_view_settings = default_view in ('list-view', 'coverflow') \
        or options['include_list_view_settings'] not in \
        ('auto', 'no', 0, False, None)

    background_bmk = None

    background_color = os.environ.get('backgroundColor')
    background_file = os.environ.get('backgroundFile')

    if background_color:
        c = parseColor(background_color).to_rgb()

        icvp['backgroundType'] = 1
        icvp['backgroundColorRed'] = float(c.r)
        icvp['backgroundColorGreen'] = float(c.g)
        icvp['backgroundColorBlue'] = float(c.b)
    elif background_file:
        alias = Alias.for_file(background_file)
        background_bmk = Bookmark.for_file(background_file)

        icvp['backgroundType'] = 2
        icvp['backgroundImageAlias'] = biplist.Data(alias.to_bytes())

    image_dsstore = os.path.join(os.environ['volumePath'], '.DS_Store')

    # NOTE(security): iconLocations is executed as Python.  It comes from
    # the build configuration (trusted, build-time input), not end users;
    # do not feed untrusted data through this environment variable.
    f = "icon_locations = {\n" + os.environ['iconLocations'] + "\n}"
    exec (f, options, options)

    with DSStore.open(image_dsstore, 'w+') as d:
        d['.']['vSrn'] = ('long', 1)
        d['.']['bwsp'] = bwsp
        if include_icon_view_settings:
            d['.']['icvp'] = icvp
            if background_bmk:
                d['.']['pBBk'] = background_bmk
        if include_list_view_settings:
            d['.']['lsvp'] = lsvp
        d['.']['icvl'] = icvl

        # Park housekeeping files far off-screen so they stay out of view.
        d['.background']['Iloc'] = (2560, 170)
        d['.DS_Store']['Iloc'] = (2610, 170)
        d['.fseventsd']['Iloc'] = (2660, 170)
        d['.Trashes']['Iloc'] = (2710, 170)
        d['.VolumeIcon.icns']['Iloc'] = (2760, 170)

        for k, v in iteritems(options['icon_locations']):
            d[k]['Iloc'] = v


build_dmg()
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/core.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/dmgbuild/core.pyc
generated
vendored
Normal file
Binary file not shown.
3
buildfiles/node_modules/dmg-builder/vendor/ds_store/__init__.py
generated
vendored
Normal file
3
buildfiles/node_modules/dmg-builder/vendor/ds_store/__init__.py
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
from .store import DSStore, DSStoreEntry
|
||||
|
||||
__all__ = ['DSStore', 'DSStoreEntry']
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/ds_store/__init__.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/ds_store/__init__.pyc
generated
vendored
Normal file
Binary file not shown.
478
buildfiles/node_modules/dmg-builder/vendor/ds_store/buddy.py
generated
vendored
Normal file
478
buildfiles/node_modules/dmg-builder/vendor/ds_store/buddy.py
generated
vendored
Normal file
@@ -0,0 +1,478 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import bisect
|
||||
import struct
|
||||
import binascii
|
||||
|
||||
# Python 2/3 compatibility shims: `iterkeys` lazily iterates a dict's keys
# on either interpreter line, and `unicode` is aliased to `str` on Python 3.
try:
    {}.iterkeys
except AttributeError:
    # Python 3: dict has no iterkeys(); keys() is already a lazy view.
    iterkeys = lambda x: x.keys()
else:
    # Python 2
    iterkeys = lambda x: x.iterkeys()

try:
    unicode
except NameError:
    # Python 3: there is no separate unicode type.
    unicode = str
|
||||
|
||||
class BuddyError(Exception):
    """Raised when a buddy-allocator file is malformed or an operation
    on it cannot be completed."""
|
||||
|
||||
class Block(object):
    """A single allocated block within a buddy-allocator file.

    The block's bytes are read eagerly into an in-memory bytearray.
    Reads and writes then operate on that copy at an internal cursor
    (see :meth:`tell`/:meth:`seek`); modified contents are written back
    through the allocator on :meth:`flush` (also triggered by
    :meth:`close` and by exiting a ``with`` block).
    """

    def __init__(self, allocator, offset, size):
        """Load `size` bytes at file offset `offset` from `allocator`."""
        self._allocator = allocator
        self._offset = offset
        self._size = size
        self._value = bytearray(allocator.read(offset, size))
        self._pos = 0
        self._dirty = False

    def __len__(self):
        return self._size

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Flush pending changes when leaving a `with` block.
        self.close()

    def close(self):
        """Write any pending changes back to the allocator."""
        if self._dirty:
            self.flush()

    def flush(self):
        """Write the in-memory contents back through the allocator."""
        if self._dirty:
            self._dirty = False
            self._allocator.write(self._offset, self._value)

    def invalidate(self):
        """Discard pending changes without writing them back."""
        self._dirty = False

    def zero_fill(self):
        """Zero the block from the current position to the end."""
        # Renamed from `len`, which shadowed the builtin.
        count = self._size - self._pos
        zeroes = b'\0' * count
        self._value[self._pos:self._size] = zeroes
        self._dirty = True

    def tell(self):
        """Return the current cursor position within the block."""
        return self._pos

    def seek(self, pos, whence=os.SEEK_SET):
        """Move the cursor.

        NB: unlike normal file semantics, SEEK_END here interprets
        `pos` as a *positive* distance back from the end of the block.
        Raises ValueError if the resulting position is out of range.
        """
        if whence == os.SEEK_CUR:
            pos += self._pos
        elif whence == os.SEEK_END:
            pos = self._size - pos

        if pos < 0 or pos > self._size:
            raise ValueError('Seek out of range in Block instance')

        self._pos = pos

    def read(self, size_or_format):
        """Read at the cursor.

        `size_or_format` is either a byte count (raw data is returned)
        or a `struct` format string (the unpacked tuple is returned).
        Raises BuddyError if fewer bytes remain than requested.
        """
        if isinstance(size_or_format, (str, unicode, bytes)):
            size = struct.calcsize(size_or_format)
            fmt = size_or_format
        else:
            size = size_or_format
            fmt = None

        if self._size - self._pos < size:
            raise BuddyError('Unable to read %lu bytes in block' % size)

        data = self._value[self._pos:self._pos + size]
        self._pos += size

        if fmt is not None:
            if isinstance(data, bytearray):
                # Convert to bytes for struct on older Pythons that do
                # not accept bytearray slices directly.
                return struct.unpack_from(fmt, bytes(data))
            else:
                return struct.unpack(fmt, data)
        else:
            return data

    def write(self, data_or_format, *args):
        """Overwrite bytes at the cursor.

        With extra `args`, `data_or_format` is a `struct` format string
        and the packed `args` are written; otherwise it is raw data.
        Raises ValueError when the write would run past the end.
        """
        if len(args):
            data = struct.pack(data_or_format, *args)
        else:
            data = data_or_format

        if self._pos + len(data) > self._size:
            raise ValueError('Attempt to write past end of Block')

        self._value[self._pos:self._pos + len(data)] = data
        self._pos += len(data)

        self._dirty = True

    def insert(self, data_or_format, *args):
        """Insert bytes at the cursor, shifting later bytes up.

        The block's size is fixed, so an equal number of bytes fall off
        the end of the block.
        """
        if len(args):
            data = struct.pack(data_or_format, *args)
        else:
            data = data_or_format

        # Drop len(data) bytes off the end, then splice the new data in.
        del self._value[-len(data):]
        self._value[self._pos:self._pos] = data
        self._pos += len(data)

        self._dirty = True

    def delete(self, size):
        """Delete `size` bytes at the cursor, shifting later bytes down.

        The block's size is fixed, so the end is zero-padded by the same
        amount.  Raises ValueError when the range runs past the end.
        """
        if self._pos + size > self._size:
            raise ValueError('Attempt to delete past end of Block')
        del self._value[self._pos:self._pos + size]
        self._value += b'\0' * size
        self._dirty = True

    def __str__(self):
        # b2a_hex returns bytes; decode so that __str__ returns text on
        # Python 3 (returning bytes from __str__ raises TypeError there).
        return binascii.b2a_hex(self._value).decode('ascii')
|
||||
|
||||
class Allocator(object):
    """Buddy allocator operating on a file, as used by ``.DS_Store`` files.

    On-disk layout: a 36-byte header, a "root" block holding the block
    offset table, the table of contents (TOC) and 32 free lists, plus the
    data blocks themselves.  Every block address is packed as
    ``offset | log2(size)`` -- the low five bits carry the width, so all
    blocks are power-of-two sized and at least 2**5 bytes.
    """

    def __init__(self, the_file):
        # Parse the header, block offsets, TOC and free lists of an open
        # binary file.  Raises BuddyError if it is not a buddy file.
        self._file = the_file
        self._dirty = False

        self._file.seek(0)

        # Read the header
        magic1, magic2, offset, size, offset2, self._unknown1 \
            = self.read(-4, '>I4sIII16s')

        if magic2 != b'Bud1' or magic1 != 1:
            raise BuddyError('Not a buddy file')

        # The root block address is stored twice and the copies must agree.
        if offset != offset2:
            raise BuddyError('Root addresses differ')

        self._root = Block(self, offset, size)

        # Read the block offsets.  They are stored in chunks of 256
        # entries, zero-padded up to the next multiple of 256, hence the
        # rounding and the final truncation back to `count`.
        count, self._unknown2 = self._root.read('>II')
        self._offsets = []
        c = (count + 255) & ~255
        while c:
            self._offsets += self._root.read('>256I')
            c -= 256
        self._offsets = self._offsets[:count]

        # Read the TOC: a count, then (length-prefixed name, block number)
        # pairs mapping names to block numbers.
        self._toc = {}
        count = self._root.read('>I')[0]
        for n in range(count):
            nlen = self._root.read('B')[0]
            name = bytes(self._root.read(nlen))
            value = self._root.read('>I')[0]
            self._toc[name] = value

        # Read the free lists, one per block width 0..31.  Note `count`
        # here is a 1-tuple; '%uI' % count works because %-formatting
        # unpacks a tuple operand.
        self._free = []
        for n in range(32):
            count = self._root.read('>I')
            self._free.append(list(self._root.read('>%uI' % count)))

    @classmethod
    def open(cls, file_or_name, mode='r+'):
        # Open `file_or_name` (a path or a binary file object) and return
        # an Allocator for it.  With a 'w' mode the file is truncated and
        # a fresh, empty buddy file is written first.
        if isinstance(file_or_name, (str, unicode)):
            if not 'b' in mode:
                mode = mode[:1] + 'b' + mode[1:]
            f = open(file_or_name, mode)
        else:
            f = file_or_name

        if 'w' in mode:
            # Create an empty file in this case
            f.truncate()

            # An empty root block needs 1264 bytes:
            #
            #     0  4       offset count
            #     4  4       unknown
            #     8  4       root block offset (2048)
            #    12  255 * 4 padding (offsets are in multiples of 256)
            #  1032  4       toc count (0)
            #  1036  228     free list
            # total 1264

            # The free list will contain the following:
            #
            #     0  5 * 4   no blocks of width less than 5
            #    20  6 * 8   1 block each of widths 5 to 10
            #    68  4       no blocks of width 11 (allocated for the root)
            #    72  19 * 8  1 block each of widths 12 to 30
            #   224  4       no blocks of width 31
            # total 228
            #
            # (The reason for this layout is that we allocate 2**5 bytes for
            #  the header, which splits the initial 2GB region into every size
            #  below 2**31, including *two* blocks of size 2**5, one of which
            #  we take.  The root block itself then needs a block of size
            #  2**11.  Conveniently, each of these initial blocks will be
            #  located at offset 2**n where n is its width.)

            # Write the header
            header = struct.pack(b'>I4sIII16s',
                                 1, b'Bud1',
                                 2048, 1264, 2048,
                                 b'\x00\x00\x10\x0c'
                                 b'\x00\x00\x00\x87'
                                 b'\x00\x00\x20\x0b'
                                 b'\x00\x00\x00\x00')
            f.write(header)
            f.write(b'\0' * 2016)

            # Write the root block
            free_list = [struct.pack(b'>5I', 0, 0, 0, 0, 0)]
            for n in range(5, 11):
                free_list.append(struct.pack(b'>II', 1, 2**n))
            free_list.append(struct.pack(b'>I', 0))
            for n in range(12, 31):
                free_list.append(struct.pack(b'>II', 1, 2**n))
            free_list.append(struct.pack(b'>I', 0))

            root = b''.join([struct.pack(b'>III', 1, 0, 2048 | 5),
                             struct.pack(b'>I', 0) * 255,
                             struct.pack(b'>I', 0)] + free_list)
            f.write(root)

        return Allocator(f)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def close(self):
        # Flush pending metadata and close the underlying file.
        self.flush()
        self._file.close()

    def flush(self):
        # If dirty, rewrite the root block (offsets, TOC, free lists) and
        # then the file header, before flushing the underlying file.
        if self._dirty:
            size = self._root_block_size()
            # Block 0 always holds the root block; resize it to fit.
            self.allocate(size, 0)
            with self.get_block(0) as rblk:
                self._write_root_block_into(rblk)

            # Unpack block 0's packed address for the header fields.
            addr = self._offsets[0]
            offset = addr & ~0x1f
            size = 1 << (addr & 0x1f)

            self._file.seek(0, os.SEEK_SET)
            self._file.write(struct.pack(b'>I4sIII16s',
                                         1, b'Bud1',
                                         offset, size, offset,
                                         self._unknown1))

            self._dirty = False

        self._file.flush()

    def read(self, offset, size_or_format):
        """Read data at `offset', or raise an exception.  `size_or_format'
           may either be a byte count, in which case we return raw data,
           or a format string for `struct.unpack', in which case we
           work out the size and unpack the data before returning it."""
        # N.B. There is a fixed offset of four bytes(!)
        self._file.seek(offset + 4, os.SEEK_SET)

        if isinstance(size_or_format, (str, unicode)):
            size = struct.calcsize(size_or_format)
            fmt = size_or_format
        else:
            size = size_or_format
            fmt = None

        ret = self._file.read(size)
        if len(ret) < size:
            # Zero-pad short reads (e.g. reads past EOF on a fresh file).
            ret += b'\0' * (size - len(ret))

        if fmt is not None:
            if isinstance(ret, bytearray):
                ret = struct.unpack_from(fmt, bytes(ret))
            else:
                ret = struct.unpack(fmt, ret)

        return ret

    def write(self, offset, data_or_format, *args):
        """Write data at `offset', or raise an exception.  `data_or_format'
           may either be the data to write, or a format string for `struct.pack',
           in which case we pack the additional arguments and write the
           resulting data."""
        # N.B. There is a fixed offset of four bytes(!)
        self._file.seek(offset + 4, os.SEEK_SET)

        if len(args):
            data = struct.pack(data_or_format, *args)
        else:
            data = data_or_format

        self._file.write(data)

    def get_block(self, block):
        # Return a Block for block number `block`, or None if the number
        # is out of range.
        try:
            addr = self._offsets[block]
        except IndexError:
            return None

        # Unpack offset|width into a byte offset and a size.
        offset = addr & ~0x1f
        size = 1 << (addr & 0x1f)

        return Block(self, offset, size)

    def _root_block_size(self):
        """Return the number of bytes required by the root block."""
        # Offsets
        size = 8
        # Offsets are written in chunks of 256 four-byte entries.
        size += 4 * ((len(self._offsets) + 255) & ~255)

        # TOC: a 4-byte count plus, per entry, 1 length byte + the name
        # + a 4-byte block number (hence 5 + len(name)).
        size += 4
        size += sum([5 + len(s) for s in self._toc])

        # Free list: a 4-byte count plus 4 bytes per entry, for each of
        # the 32 widths.
        size += sum([4 + 4 * len(fl) for fl in self._free])

        return size

    def _write_root_block_into(self, block):
        # Serialize offsets, TOC and free lists into `block`, mirroring
        # the layout parsed by __init__.
        # Offsets
        block.write('>II', len(self._offsets), self._unknown2)
        block.write('>%uI' % len(self._offsets), *self._offsets)
        # Zero-pad the offset table to a multiple of 256 entries.
        extra = len(self._offsets) & 255
        if extra:
            block.write(b'\0\0\0\0' * (256 - extra))

        # TOC, sorted by name for a deterministic layout.
        keys = list(self._toc.keys())
        keys.sort()

        block.write('>I', len(keys))
        for k in keys:
            block.write('B', len(k))
            block.write(k)
            block.write('>I', self._toc[k])

        # Free list
        for w, f in enumerate(self._free):
            block.write('>I', len(f))
            if len(f):
                block.write('>%uI' % len(f), *f)

    def _buddy(self, offset, width):
        # Return (free list for `width`, the buddy address of `offset`,
        # and the buddy's index in that free list or None if it is not
        # free).  A block's buddy differs from it only in bit `width`.
        f = self._free[width]
        b = offset ^ (1 << width)

        try:
            ndx = f.index(b)
        except ValueError:
            ndx = None

        return (f, b, ndx)

    def _release(self, offset, width):
        # Return a block of 2**width bytes at `offset` to the free lists.
        # Coalesce: while the buddy is also free, merge the two into one
        # block of the next width up.
        while True:
            f,b,ndx = self._buddy(offset, width)

            if ndx is None:
                break

            # The merged block starts at the lower of the two addresses.
            offset &= b
            width += 1
            del f[ndx]

        # Add to the list (kept sorted so coalescing stays cheap)
        bisect.insort(f, offset)

        # Mark as dirty
        self._dirty = True

    def _alloc(self, width):
        # Pop a block of 2**width bytes off the free lists, splitting the
        # smallest available larger block into buddies as needed.
        w = width
        while not self._free[w]:
            w += 1
        while w > width:
            offset = self._free[w].pop(0)
            w -= 1
            self._free[w] = [offset, offset ^ (1 << w)]
        self._dirty = True
        return self._free[width].pop(0)

    def allocate(self, bytes, block=None):
        """Allocate or reallocate a block such that it has space for at least
           `bytes' bytes."""
        # (`bytes` here shadows the builtin; kept for API stability.)
        if block is None:
            # Find the first unused block
            try:
                block = self._offsets.index(0)
            except ValueError:
                block = len(self._offsets)
                self._offsets.append(0)

        # Compute block width: the smallest power of two >= `bytes`,
        # with a floor of 2**5.
        width = max(bytes.bit_length(), 5)

        addr = self._offsets[block]
        offset = addr & ~0x1f

        if addr:
            blkwidth = addr & 0x1f
            if blkwidth == width:
                return block
            # NOTE(review): this releases the old block using the *new*
            # width rather than `blkwidth`; when the widths differ this
            # looks like it would corrupt the free lists -- confirm
            # against upstream ds_store before relying on reallocation.
            self._release(offset, width)
            self._offsets[block] = 0

        offset = self._alloc(width)
        self._offsets[block] = offset | width
        return block

    def release(self, block):
        # Free block number `block` and clear its offset-table slot.
        addr = self._offsets[block]

        if addr:
            width = addr & 0x1f
            offset = addr & ~0x1f
            self._release(offset, width)

        # NOTE(review): the indexing above would already have raised
        # IndexError when block == len(self._offsets), so this branch
        # appears unreachable; possibly `len(self._offsets) - 1` was
        # intended (to trim the table) -- confirm before changing.
        if block == len(self._offsets):
            del self._offsets[block]
        else:
            self._offsets[block] = 0

    def __len__(self):
        # Number of named entries in the TOC.
        return len(self._toc)

    def __getitem__(self, key):
        # TOC lookup; names are stored as latin-1 bytes internally.
        if not isinstance(key, (str, unicode)):
            raise TypeError('Keys must be of string type')
        if not isinstance(key, bytes):
            key = key.encode('latin_1')
        return self._toc[key]

    def __setitem__(self, key, value):
        # TOC assignment; marks the allocator dirty so flush() rewrites
        # the root block.
        if not isinstance(key, (str, unicode)):
            raise TypeError('Keys must be of string type')
        if not isinstance(key, bytes):
            key = key.encode('latin_1')
        self._toc[key] = value
        self._dirty = True

    def __delitem__(self, key):
        # TOC removal; marks the allocator dirty.
        if not isinstance(key, (str, unicode)):
            raise TypeError('Keys must be of string type')
        if not isinstance(key, bytes):
            key = key.encode('latin_1')
        del self._toc[key]
        self._dirty = True

    def iterkeys(self):
        # Iterate TOC names (as bytes).
        return iterkeys(self._toc)

    def keys(self):
        return iterkeys(self._toc)

    def __iter__(self):
        return iterkeys(self._toc)

    def __contains__(self, key):
        # NOTE(review): unlike __getitem__/__setitem__, `key` is not
        # encoded to latin-1 bytes here, so `'name' in alloc` with a text
        # key will miss byte keys on Python 3 -- confirm intended.
        return key in self._toc
|
||||
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/ds_store/buddy.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/ds_store/buddy.pyc
generated
vendored
Normal file
Binary file not shown.
1251
buildfiles/node_modules/dmg-builder/vendor/ds_store/store.py
generated
vendored
Normal file
1251
buildfiles/node_modules/dmg-builder/vendor/ds_store/store.py
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
BIN
buildfiles/node_modules/dmg-builder/vendor/ds_store/store.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/ds_store/store.pyc
generated
vendored
Normal file
Binary file not shown.
27
buildfiles/node_modules/dmg-builder/vendor/mac_alias/__init__.py
generated
vendored
Normal file
27
buildfiles/node_modules/dmg-builder/vendor/mac_alias/__init__.py
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
from .alias import *
|
||||
from .bookmark import *
|
||||
|
||||
__all__ = [ 'ALIAS_KIND_FILE', 'ALIAS_KIND_FOLDER',
|
||||
'ALIAS_HFS_VOLUME_SIGNATURE',
|
||||
'ALIAS_FIXED_DISK', 'ALIAS_NETWORK_DISK', 'ALIAS_400KB_FLOPPY_DISK',
|
||||
'ALIAS_800KB_FLOPPY_DISK', 'ALIAS_1_44MB_FLOPPY_DISK',
|
||||
'ALIAS_EJECTABLE_DISK',
|
||||
'ALIAS_NO_CNID',
|
||||
'kBookmarkPath', 'kBookmarkCNIDPath', 'kBookmarkFileProperties',
|
||||
'kBookmarkFileName', 'kBookmarkFileID', 'kBookmarkFileCreationDate',
|
||||
'kBookmarkTOCPath', 'kBookmarkVolumePath',
|
||||
'kBookmarkVolumeURL', 'kBookmarkVolumeName', 'kBookmarkVolumeUUID',
|
||||
'kBookmarkVolumeSize', 'kBookmarkVolumeCreationDate',
|
||||
'kBookmarkVolumeProperties', 'kBookmarkContainingFolder',
|
||||
'kBookmarkUserName', 'kBookmarkUID', 'kBookmarkWasFileReference',
|
||||
'kBookmarkCreationOptions', 'kBookmarkURLLengths',
|
||||
'kBookmarkSecurityExtension',
|
||||
'AppleShareInfo',
|
||||
'VolumeInfo',
|
||||
'TargetInfo',
|
||||
'Alias',
|
||||
'Bookmark',
|
||||
'Data',
|
||||
'URL' ]
|
||||
|
||||
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/__init__.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/__init__.pyc
generated
vendored
Normal file
Binary file not shown.
612
buildfiles/node_modules/dmg-builder/vendor/mac_alias/alias.py
generated
vendored
Normal file
612
buildfiles/node_modules/dmg-builder/vendor/mac_alias/alias.py
generated
vendored
Normal file
@@ -0,0 +1,612 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
from __future__ import division
|
||||
|
||||
import struct
|
||||
import datetime
|
||||
import io
|
||||
import re
|
||||
import os
|
||||
import os.path
|
||||
import stat
|
||||
import sys
|
||||
from unicodedata import normalize
|
||||
|
||||
if sys.platform == 'darwin':
|
||||
from . import osx
|
||||
|
||||
try:
|
||||
long
|
||||
except NameError:
|
||||
long = int
|
||||
|
||||
from .utils import *
|
||||
|
||||
# Kind of object an alias points at.
ALIAS_KIND_FILE = 0
ALIAS_KIND_FOLDER = 1

# Two-byte filesystem signature for HFS+ volumes.
ALIAS_HFS_VOLUME_SIGNATURE = b'H+'

# Disk/medium type codes stored in the alias record's volume information.
ALIAS_FIXED_DISK = 0
ALIAS_NETWORK_DISK = 1
ALIAS_400KB_FLOPPY_DISK = 2
ALIAS_800KB_FLOPPY_DISK = 3
ALIAS_1_44MB_FLOPPY_DISK = 4
ALIAS_EJECTABLE_DISK = 5

# Sentinel CNID (Catalog Node ID) meaning "no CNID available".
ALIAS_NO_CNID = 0xffffffff
|
||||
|
||||
def encode_utf8(s):
    """Return *s* as UTF-8 encoded bytes; byte strings pass through."""
    return s if isinstance(s, bytes) else s.encode('utf-8')
|
||||
|
||||
def decode_utf8(s):
    """Return *s* as text, decoding byte strings as UTF-8."""
    return s.decode('utf-8') if isinstance(s, bytes) else s
|
||||
|
||||
class AppleShareInfo (object):
    """AppleShare connection details carried inside an alias record,
    used for automatic remounting of network shares."""

    def __init__(self, zone=None, server=None, user=None):
        #: The AppleShare zone
        self.zone = zone
        #: The AFP server
        self.server = server
        #: The username
        self.user = user

    def __repr__(self):
        return 'AppleShareInfo({0!r},{1!r},{2!r})'.format(
            self.zone, self.server, self.user)
|
||||
|
||||
class VolumeInfo (object):
    """Describes the volume on which an alias target resides."""

    def __init__(self, name, creation_date, fs_type, disk_type,
                 attribute_flags, fs_id, appleshare_info=None,
                 driver_name=None, posix_path=None, disk_image_alias=None,
                 dialup_info=None, network_mount_info=None):
        #: Name of the volume containing the target
        self.name = name

        #: When the target's volume was created
        self.creation_date = creation_date

        #: Two-character filesystem code (e.g. ``b'H+'`` for HFS+)
        self.fs_type = fs_type

        #: Medium type; one of the ALIAS_*_DISK constants
        #: (ALIAS_FIXED_DISK, ALIAS_NETWORK_DISK, the floppy variants,
        #: or ALIAS_EJECTABLE_DISK)
        self.disk_type = disk_type

        #: Attribute flags from the HFS volume header
        self.attribute_flags = attribute_flags

        #: Filesystem identifier
        self.fs_id = fs_id

        #: Optional :class:`AppleShareInfo` used to remount network
        #: shares automatically
        self.appleshare_info = appleshare_info

        #: Optional disk driver name (likely only on older Macs)
        self.driver_name = driver_name

        #: Optional POSIX path of the volume's mount point
        self.posix_path = posix_path

        #: Optional :class:`Alias` pointing at the disk image that holds
        #: the target's volume
        self.disk_image_alias = disk_image_alias

        #: Optional dial-up information for automatic connections
        self.dialup_info = dialup_info

        #: Optional network mount information for automatic remounting
        self.network_mount_info = network_mount_info

    def __repr__(self):
        # Positional fields are always shown; optional fields only when
        # they are set.
        parts = [repr(getattr(self, attr))
                 for attr in ('name', 'creation_date', 'fs_type',
                              'disk_type', 'attribute_flags', 'fs_id')]
        for attr in ('appleshare_info', 'driver_name', 'posix_path',
                     'disk_image_alias', 'dialup_info',
                     'network_mount_info'):
            value = getattr(self, attr)
            if value is not None:
                parts.append('{0}={1!r}'.format(attr, value))
        return 'VolumeInfo(%s)' % ','.join(parts)
|
||||
|
||||
class TargetInfo (object):
    """Describes the file or folder that an alias points at."""

    def __init__(self, kind, filename, folder_cnid, cnid, creation_date,
                 creator_code, type_code, levels_from=-1, levels_to=-1,
                 folder_name=None, cnid_path=None, carbon_path=None,
                 posix_path=None, user_home_prefix_len=None):
        #: ALIAS_KIND_FILE or ALIAS_KIND_FOLDER
        self.kind = kind

        #: Filename of the target
        self.filename = filename

        #: CNID (Catalog Node ID) of the target's containing folder;
        #: similar in spirit to, but distinct from, UNIX inode numbers
        self.folder_cnid = folder_cnid

        #: CNID of the target itself
        self.cnid = cnid

        #: The target's *creation* date
        self.creation_date = creation_date

        #: Four-character binary Mac creator code
        self.creator_code = creator_code

        #: Four-character binary Mac type code
        self.type_code = type_code

        #: Depth of the alias?  Observed as -1 on OS X
        self.levels_from = levels_from

        #: Depth of the target?  Observed as -1 on OS X
        self.levels_to = levels_to

        #: Optional POSIX name of the target's containing folder
        self.folder_name = folder_name

        #: Optional path from the volume root as a sequence of CNIDs
        self.cnid_path = cnid_path

        #: Optional Carbon path of the target
        self.carbon_path = carbon_path

        #: Optional POSIX path relative to the volume root; may or may
        #: not carry a leading '/'
        self.posix_path = posix_path

        #: Optional folder depth before reaching the user's home folder,
        #: when the path points inside one
        self.user_home_prefix_len = user_home_prefix_len

    def __repr__(self):
        parts = [repr(getattr(self, attr))
                 for attr in ('kind', 'filename', 'folder_cnid', 'cnid',
                              'creation_date', 'creator_code',
                              'type_code')]

        # The level fields appear only when they differ from the default.
        if self.levels_from != -1:
            parts.append('levels_from=%r' % self.levels_from)
        if self.levels_to != -1:
            parts.append('levels_to=%r' % self.levels_to)

        # NB: unlike VolumeInfo, the optional fields are rendered
        # unconditionally, even when None.
        for attr in ('folder_name', 'cnid_path', 'carbon_path',
                     'posix_path', 'user_home_prefix_len'):
            parts.append('{0}={1!r}'.format(attr, getattr(self, attr)))

        return 'TargetInfo(%s)' % ','.join(parts)
|
||||
|
||||
# Tag codes for the variable-length extra records that follow the fixed
# portion of an alias record (consumed by Alias._from_fd and emitted by
# Alias._to_fd; a tag of -1 terminates the list).
TAG_CARBON_FOLDER_NAME = 0
TAG_CNID_PATH = 1
TAG_CARBON_PATH = 2
# AppleShare remount information.
TAG_APPLESHARE_ZONE = 3
TAG_APPLESHARE_SERVER_NAME = 4
TAG_APPLESHARE_USERNAME = 5
TAG_DRIVER_NAME = 6
TAG_NETWORK_MOUNT_INFO = 9
TAG_DIALUP_INFO = 10
# UTF-16-BE names with a 2-byte length prefix.
TAG_UNICODE_FILENAME = 14
TAG_UNICODE_VOLUME_NAME = 15
# 64-bit fixed-point (seconds * 65536) timestamps since the Mac epoch.
TAG_HIGH_RES_VOLUME_CREATION_DATE = 16
TAG_HIGH_RES_CREATION_DATE = 17
TAG_POSIX_PATH = 18
TAG_POSIX_PATH_TO_MOUNTPOINT = 19
TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE = 20
TAG_USER_HOME_LENGTH_PREFIX = 21
|
||||
|
||||
class Alias (object):
|
||||
def __init__(self, appinfo=b'\0\0\0\0', version=2, volume=None,
|
||||
target=None, extra=[]):
|
||||
"""Construct a new :class:`Alias` object with the specified
|
||||
contents."""
|
||||
|
||||
#: Application specific information (four byte byte-string)
|
||||
self.appinfo = appinfo
|
||||
|
||||
#: Version (we support only version 2)
|
||||
self.version = version
|
||||
|
||||
#: A :class:`VolumeInfo` object describing the target's volume
|
||||
self.volume = volume
|
||||
|
||||
#: A :class:`TargetInfo` object describing the target
|
||||
self.target = target
|
||||
|
||||
#: A list of extra `(tag, value)` pairs
|
||||
self.extra = list(extra)
|
||||
|
||||
@classmethod
|
||||
def _from_fd(cls, b):
|
||||
appinfo, recsize, version = struct.unpack(b'>4shh', b.read(8))
|
||||
|
||||
if recsize < 150:
|
||||
raise ValueError('Incorrect alias length')
|
||||
|
||||
if version != 2:
|
||||
raise ValueError('Unsupported alias version %u' % version)
|
||||
|
||||
kind, volname, voldate, fstype, disktype, \
|
||||
folder_cnid, filename, cnid, crdate, creator_code, type_code, \
|
||||
levels_from, levels_to, volattrs, volfsid, reserved = \
|
||||
struct.unpack(b'>h28pI2shI64pII4s4shhI2s10s', b.read(142))
|
||||
|
||||
voldate = mac_epoch + datetime.timedelta(seconds=voldate)
|
||||
crdate = mac_epoch + datetime.timedelta(seconds=crdate)
|
||||
|
||||
alias = Alias()
|
||||
alias.appinfo = appinfo
|
||||
|
||||
alias.volume = VolumeInfo (volname.replace('/',':'),
|
||||
voldate, fstype, disktype,
|
||||
volattrs, volfsid)
|
||||
alias.target = TargetInfo (kind, filename.replace('/',':'),
|
||||
folder_cnid, cnid,
|
||||
crdate, creator_code, type_code)
|
||||
alias.target.levels_from = levels_from
|
||||
alias.target.levels_to = levels_to
|
||||
|
||||
tag = struct.unpack(b'>h', b.read(2))[0]
|
||||
|
||||
while tag != -1:
|
||||
length = struct.unpack(b'>h', b.read(2))[0]
|
||||
value = b.read(length)
|
||||
if length & 1:
|
||||
b.read(1)
|
||||
|
||||
if tag == TAG_CARBON_FOLDER_NAME:
|
||||
alias.target.folder_name = value.replace('/',':')
|
||||
elif tag == TAG_CNID_PATH:
|
||||
alias.target.cnid_path = struct.unpack(b'>%uI' % (length // 4),
|
||||
value)
|
||||
elif tag == TAG_CARBON_PATH:
|
||||
alias.target.carbon_path = value
|
||||
elif tag == TAG_APPLESHARE_ZONE:
|
||||
if alias.volume.appleshare_info is None:
|
||||
alias.volume.appleshare_info = AppleShareInfo()
|
||||
alias.volume.appleshare_info.zone = value
|
||||
elif tag == TAG_APPLESHARE_SERVER_NAME:
|
||||
if alias.volume.appleshare_info is None:
|
||||
alias.volume.appleshare_info = AppleShareInfo()
|
||||
alias.volume.appleshare_info.server = value
|
||||
elif tag == TAG_APPLESHARE_USERNAME:
|
||||
if alias.volume.appleshare_info is None:
|
||||
alias.volume.appleshare_info = AppleShareInfo()
|
||||
alias.volume.appleshare_info.user = value
|
||||
elif tag == TAG_DRIVER_NAME:
|
||||
alias.volume.driver_name = value
|
||||
elif tag == TAG_NETWORK_MOUNT_INFO:
|
||||
alias.volume.network_mount_info = value
|
||||
elif tag == TAG_DIALUP_INFO:
|
||||
alias.volume.dialup_info = value
|
||||
elif tag == TAG_UNICODE_FILENAME:
|
||||
alias.target.filename = value[2:].decode('utf-16be')
|
||||
elif tag == TAG_UNICODE_VOLUME_NAME:
|
||||
alias.volume.name = value[2:].decode('utf-16be')
|
||||
elif tag == TAG_HIGH_RES_VOLUME_CREATION_DATE:
|
||||
seconds = struct.unpack(b'>Q', value)[0] / 65536.0
|
||||
alias.volume.creation_date \
|
||||
= mac_epoch + datetime.timedelta(seconds=seconds)
|
||||
elif tag == TAG_HIGH_RES_CREATION_DATE:
|
||||
seconds = struct.unpack(b'>Q', value)[0] / 65536.0
|
||||
alias.target.creation_date \
|
||||
= mac_epoch + datetime.timedelta(seconds=seconds)
|
||||
elif tag == TAG_POSIX_PATH:
|
||||
alias.target.posix_path = value
|
||||
elif tag == TAG_POSIX_PATH_TO_MOUNTPOINT:
|
||||
alias.volume.posix_path = value
|
||||
elif tag == TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE:
|
||||
alias.volume.disk_image_alias = Alias.from_bytes(value)
|
||||
elif tag == TAG_USER_HOME_LENGTH_PREFIX:
|
||||
alias.target.user_home_prefix_len = struct.unpack(b'>h', value)[0]
|
||||
else:
|
||||
alias.extra.append((tag, value))
|
||||
|
||||
tag = struct.unpack(b'>h', b.read(2))[0]
|
||||
|
||||
return alias
|
||||
|
||||
@classmethod
|
||||
def from_bytes(cls, bytes):
|
||||
"""Construct an :class:`Alias` object given binary Alias data."""
|
||||
with io.BytesIO(bytes) as b:
|
||||
return cls._from_fd(b)
|
||||
|
||||
@classmethod
|
||||
def for_file(cls, path):
|
||||
"""Create an :class:`Alias` that points at the specified file."""
|
||||
if sys.platform != 'darwin':
|
||||
raise Exception('Not implemented (requires special support)')
|
||||
|
||||
path = encode_utf8(path)
|
||||
|
||||
a = Alias()
|
||||
|
||||
# Find the filesystem
|
||||
st = osx.statfs(path)
|
||||
vol_path = st.f_mntonname
|
||||
|
||||
# File and folder names in HFS+ are normalized to a form similar to NFD.
|
||||
# Must be normalized (NFD->NFC) before use to avoid unicode string comparison issues.
|
||||
vol_path = normalize("NFC", vol_path.decode('utf-8')).encode('utf-8')
|
||||
|
||||
# Grab its attributes
|
||||
attrs = [osx.ATTR_CMN_CRTIME,
|
||||
osx.ATTR_VOL_NAME,
|
||||
0, 0, 0]
|
||||
volinfo = osx.getattrlist(vol_path, attrs, 0)
|
||||
|
||||
vol_crtime = volinfo[0]
|
||||
vol_name = encode_utf8(volinfo[1])
|
||||
|
||||
# Also grab various attributes of the file
|
||||
attrs = [(osx.ATTR_CMN_OBJTYPE
|
||||
| osx.ATTR_CMN_CRTIME
|
||||
| osx.ATTR_CMN_FNDRINFO
|
||||
| osx.ATTR_CMN_FILEID
|
||||
| osx.ATTR_CMN_PARENTID), 0, 0, 0, 0]
|
||||
info = osx.getattrlist(path, attrs, osx.FSOPT_NOFOLLOW)
|
||||
|
||||
if info[0] == osx.VDIR:
|
||||
kind = ALIAS_KIND_FOLDER
|
||||
else:
|
||||
kind = ALIAS_KIND_FILE
|
||||
|
||||
cnid = info[3]
|
||||
folder_cnid = info[4]
|
||||
|
||||
dirname, filename = os.path.split(path)
|
||||
|
||||
if dirname == b'' or dirname == b'.':
|
||||
dirname = os.getcwd()
|
||||
|
||||
foldername = os.path.basename(dirname)
|
||||
|
||||
creation_date = info[1]
|
||||
|
||||
if kind == ALIAS_KIND_FILE:
|
||||
creator_code = struct.pack(b'I', info[2].fileInfo.fileCreator)
|
||||
type_code = struct.pack(b'I', info[2].fileInfo.fileType)
|
||||
else:
|
||||
creator_code = b'\0\0\0\0'
|
||||
type_code = b'\0\0\0\0'
|
||||
|
||||
a.target = TargetInfo(kind, filename, folder_cnid, cnid, creation_date,
|
||||
creator_code, type_code)
|
||||
a.volume = VolumeInfo(vol_name, vol_crtime, b'H+',
|
||||
ALIAS_FIXED_DISK, 0, b'\0\0')
|
||||
|
||||
a.target.folder_name = foldername
|
||||
a.volume.posix_path = vol_path
|
||||
|
||||
rel_path = os.path.relpath(path, vol_path)
|
||||
|
||||
# Leave off the initial '/' if vol_path is '/' (no idea why)
|
||||
if vol_path == b'/':
|
||||
a.target.posix_path = rel_path
|
||||
else:
|
||||
a.target.posix_path = b'/' + rel_path
|
||||
|
||||
# Construct the Carbon and CNID paths
|
||||
carbon_path = []
|
||||
cnid_path = []
|
||||
head, tail = os.path.split(rel_path)
|
||||
if not tail:
|
||||
head, tail = os.path.split(head)
|
||||
while head or tail:
|
||||
if head:
|
||||
attrs = [osx.ATTR_CMN_FILEID, 0, 0, 0, 0]
|
||||
info = osx.getattrlist(os.path.join(vol_path, head), attrs, 0)
|
||||
cnid_path.append(info[0])
|
||||
carbon_tail = tail.replace(b':',b'/')
|
||||
carbon_path.insert(0, carbon_tail)
|
||||
head, tail = os.path.split(head)
|
||||
|
||||
carbon_path = vol_name + b':' + b':\0'.join(carbon_path)
|
||||
|
||||
a.target.carbon_path = carbon_path
|
||||
a.target.cnid_path = cnid_path
|
||||
|
||||
return a
|
||||
|
||||
def _to_fd(self, b):
    """Serialize this :class:`Alias` to the binary file object *b*.

    Writes the fixed-size alias header followed by a sequence of
    (tag, length, data) records, each padded to an even length and
    terminated by a tag of -1.  The record length stored at offset 4
    is back-patched once the total size is known.
    """
    # We'll come back and fix the length when we're done
    pos = b.tell()
    b.write(struct.pack(b'>4shh', self.appinfo, 0, self.version))

    # Carbon paths use ':' as the separator, so any ':' inside a name
    # is stored as '/'.
    carbon_volname = encode_utf8(self.volume.name).replace(b':',b'/')
    carbon_filename = encode_utf8(self.target.filename).replace(b':',b'/')
    voldate = (self.volume.creation_date - mac_epoch).total_seconds()
    crdate = (self.target.creation_date - mac_epoch).total_seconds()

    # NOTE: crdate should be in local time, but that's system dependent
    # (so doing so is ridiculous, and nothing could rely on it).
    b.write(struct.pack(b'>h28pI2shI64pII4s4shhI2s10s',
                        self.target.kind,
                        carbon_volname, int(voldate),
                        self.volume.fs_type,
                        self.volume.disk_type,
                        self.target.folder_cnid,
                        carbon_filename,
                        self.target.cnid,
                        int(crdate),
                        self.target.creator_code,
                        self.target.type_code,
                        self.target.levels_from,
                        self.target.levels_to,
                        self.volume.attribute_flags,
                        self.volume.fs_id,
                        b'\0'*10))

    # Excuse the odd order; we're copying Finder
    if self.target.folder_name:
        carbon_foldername = encode_utf8(self.target.folder_name)\
                            .replace(b':',b'/')
        b.write(struct.pack(b'>hh', TAG_CARBON_FOLDER_NAME,
                            len(carbon_foldername)))
        b.write(carbon_foldername)
        if len(carbon_foldername) & 1:
            b.write(b'\0')

    # High-resolution dates are 16.16 fixed point (seconds * 65536).
    b.write(struct.pack(b'>hhQhhQ',
                        TAG_HIGH_RES_VOLUME_CREATION_DATE,
                        8, long(voldate * 65536),
                        TAG_HIGH_RES_CREATION_DATE,
                        8, long(crdate * 65536)))

    if self.target.cnid_path:
        cnid_path = struct.pack(b'>%uI' % len(self.target.cnid_path),
                                *self.target.cnid_path)
        b.write(struct.pack(b'>hh', TAG_CNID_PATH,
                            len(cnid_path)))
        b.write(cnid_path)

    if self.target.carbon_path:
        carbon_path=encode_utf8(self.target.carbon_path)
        b.write(struct.pack(b'>hh', TAG_CARBON_PATH,
                            len(carbon_path)))
        b.write(carbon_path)
        if len(carbon_path) & 1:
            b.write(b'\0')

    if self.volume.appleshare_info:
        ai = self.volume.appleshare_info
        if ai.zone:
            b.write(struct.pack(b'>hh', TAG_APPLESHARE_ZONE,
                                len(ai.zone)))
            b.write(ai.zone)
            if len(ai.zone) & 1:
                b.write(b'\0')
        if ai.server:
            b.write(struct.pack(b'>hh', TAG_APPLESHARE_SERVER_NAME,
                                len(ai.server)))
            b.write(ai.server)
            if len(ai.server) & 1:
                b.write(b'\0')
        if ai.username:
            b.write(struct.pack(b'>hh', TAG_APPLESHARE_USERNAME,
                                len(ai.username)))
            b.write(ai.username)
            if len(ai.username) & 1:
                b.write(b'\0')

    if self.volume.driver_name:
        driver_name = encode_utf8(self.volume.driver_name)
        b.write(struct.pack(b'>hh', TAG_DRIVER_NAME,
                            len(driver_name)))
        b.write(driver_name)
        if len(driver_name) & 1:
            b.write(b'\0')

    if self.volume.network_mount_info:
        b.write(struct.pack(b'>hh', TAG_NETWORK_MOUNT_INFO,
                            len(self.volume.network_mount_info)))
        b.write(self.volume.network_mount_info)
        if len(self.volume.network_mount_info) & 1:
            b.write(b'\0')

    if self.volume.dialup_info:
        # BUGFIX: this record previously measured and wrote
        # self.volume.network_mount_info (a copy-paste error), which
        # emitted the wrong payload -- or crashed on len(None) -- when
        # only dialup_info was set.
        b.write(struct.pack(b'>hh', TAG_DIALUP_INFO,
                            len(self.volume.dialup_info)))
        b.write(self.volume.dialup_info)
        if len(self.volume.dialup_info) & 1:
            b.write(b'\0')

    # Unicode names are UTF-16-BE, preceded by a character count.
    utf16 = decode_utf8(self.target.filename)\
            .replace(':','/').encode('utf-16-be')
    b.write(struct.pack(b'>hhh', TAG_UNICODE_FILENAME,
                        len(utf16) + 2,
                        len(utf16) // 2))
    b.write(utf16)

    utf16 = decode_utf8(self.volume.name)\
            .replace(':','/').encode('utf-16-be')
    b.write(struct.pack(b'>hhh', TAG_UNICODE_VOLUME_NAME,
                        len(utf16) + 2,
                        len(utf16) // 2))
    b.write(utf16)

    if self.target.posix_path:
        posix_path = encode_utf8(self.target.posix_path)
        b.write(struct.pack(b'>hh', TAG_POSIX_PATH,
                            len(posix_path)))
        b.write(posix_path)
        if len(posix_path) & 1:
            b.write(b'\0')

    if self.volume.posix_path:
        posix_path = encode_utf8(self.volume.posix_path)
        b.write(struct.pack(b'>hh', TAG_POSIX_PATH_TO_MOUNTPOINT,
                            len(posix_path)))
        b.write(posix_path)
        if len(posix_path) & 1:
            b.write(b'\0')

    if self.volume.disk_image_alias:
        d = self.volume.disk_image_alias.to_bytes()
        b.write(struct.pack(b'>hh', TAG_RECURSIVE_ALIAS_OF_DISK_IMAGE,
                            len(d)))
        b.write(d)
        if len(d) & 1:
            b.write(b'\0')

    if self.target.user_home_prefix_len is not None:
        b.write(struct.pack(b'>hhh', TAG_USER_HOME_LENGTH_PREFIX,
                            2, self.target.user_home_prefix_len))

    # Unknown/unparsed records are preserved verbatim.
    for t,v in self.extra:
        b.write(struct.pack(b'>hh', t, len(v)))
        b.write(v)
        if len(v) & 1:
            b.write(b'\0')

    # Terminator record.
    b.write(struct.pack(b'>hh', -1, 0))

    # Back-patch the total record length into the header.
    blen = b.tell() - pos
    b.seek(pos + 4, os.SEEK_SET)
    b.write(struct.pack(b'>h', blen))
|
||||
|
||||
def to_bytes(self):
    """Return this :class:`Alias` serialized as a byte string."""
    buf = io.BytesIO()
    try:
        self._to_fd(buf)
        return buf.getvalue()
    finally:
        buf.close()
|
||||
|
||||
def __str__(self):
    """Short human-readable summary naming the alias target."""
    filename = self.target.filename
    return '<Alias target=%s>' % filename
|
||||
|
||||
def __repr__(self):
    """Constructor-style representation listing only non-default fields."""
    fields = []
    add = fields.append
    if self.appinfo != b'\0\0\0\0':
        add('appinfo=%r' % self.appinfo)
    if self.version != 2:
        add('version=%r' % self.version)
    if self.volume is not None:
        add('volume=%r' % self.volume)
    if self.target is not None:
        add('target=%r' % self.target)
    if self.extra:
        add('extra=%r' % self.extra)
    return 'Alias(%s)' % ','.join(fields)
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/alias.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/alias.pyc
generated
vendored
Normal file
Binary file not shown.
665
buildfiles/node_modules/dmg-builder/vendor/mac_alias/bookmark.py
generated
vendored
Normal file
665
buildfiles/node_modules/dmg-builder/vendor/mac_alias/bookmark.py
generated
vendored
Normal file
@@ -0,0 +1,665 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# This file implements the Apple "bookmark" format, which is the replacement
|
||||
# for the old-fashioned alias format. The details of this format were
|
||||
# reverse engineered; some things are still not entirely clear.
|
||||
#
|
||||
from __future__ import unicode_literals, print_function
|
||||
|
||||
import struct
|
||||
import uuid
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
import pprint
|
||||
|
||||
try:
|
||||
from urlparse import urljoin
|
||||
except ImportError:
|
||||
from urllib.parse import urljoin
|
||||
|
||||
if sys.platform == 'darwin':
|
||||
from . import osx
|
||||
|
||||
def iteritems(x):
|
||||
return x.iteritems()
|
||||
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = str
|
||||
long = int
|
||||
xrange = range
|
||||
def iteritems(x):
|
||||
return x.items()
|
||||
|
||||
from .utils import *
|
||||
|
||||
BMK_DATA_TYPE_MASK = 0xffffff00
|
||||
BMK_DATA_SUBTYPE_MASK = 0x000000ff
|
||||
|
||||
BMK_STRING = 0x0100
|
||||
BMK_DATA = 0x0200
|
||||
BMK_NUMBER = 0x0300
|
||||
BMK_DATE = 0x0400
|
||||
BMK_BOOLEAN = 0x0500
|
||||
BMK_ARRAY = 0x0600
|
||||
BMK_DICT = 0x0700
|
||||
BMK_UUID = 0x0800
|
||||
BMK_URL = 0x0900
|
||||
BMK_NULL = 0x0a00
|
||||
|
||||
BMK_ST_ZERO = 0x0000
|
||||
BMK_ST_ONE = 0x0001
|
||||
|
||||
BMK_BOOLEAN_ST_FALSE = 0x0000
|
||||
BMK_BOOLEAN_ST_TRUE = 0x0001
|
||||
|
||||
# Subtypes for BMK_NUMBER are really CFNumberType values
|
||||
kCFNumberSInt8Type = 1
|
||||
kCFNumberSInt16Type = 2
|
||||
kCFNumberSInt32Type = 3
|
||||
kCFNumberSInt64Type = 4
|
||||
kCFNumberFloat32Type = 5
|
||||
kCFNumberFloat64Type = 6
|
||||
kCFNumberCharType = 7
|
||||
kCFNumberShortType = 8
|
||||
kCFNumberIntType = 9
|
||||
kCFNumberLongType = 10
|
||||
kCFNumberLongLongType = 11
|
||||
kCFNumberFloatType = 12
|
||||
kCFNumberDoubleType = 13
|
||||
kCFNumberCFIndexType = 14
|
||||
kCFNumberNSIntegerType = 15
|
||||
kCFNumberCGFloatType = 16
|
||||
|
||||
# Resource property flags (from CFURLPriv.h)
|
||||
kCFURLResourceIsRegularFile = 0x00000001
|
||||
kCFURLResourceIsDirectory = 0x00000002
|
||||
kCFURLResourceIsSymbolicLink = 0x00000004
|
||||
kCFURLResourceIsVolume = 0x00000008
|
||||
kCFURLResourceIsPackage = 0x00000010
|
||||
kCFURLResourceIsSystemImmutable = 0x00000020
|
||||
kCFURLResourceIsUserImmutable = 0x00000040
|
||||
kCFURLResourceIsHidden = 0x00000080
|
||||
kCFURLResourceHasHiddenExtension = 0x00000100
|
||||
kCFURLResourceIsApplication = 0x00000200
|
||||
kCFURLResourceIsCompressed = 0x00000400
|
||||
kCFURLResourceIsSystemCompressed = 0x00000400
|
||||
kCFURLCanSetHiddenExtension = 0x00000800
|
||||
kCFURLResourceIsReadable = 0x00001000
|
||||
kCFURLResourceIsWriteable = 0x00002000
|
||||
kCFURLResourceIsExecutable = 0x00004000
|
||||
kCFURLIsAliasFile = 0x00008000
|
||||
kCFURLIsMountTrigger = 0x00010000
|
||||
|
||||
# Volume property flags (from CFURLPriv.h)
|
||||
kCFURLVolumeIsLocal = 0x1 #
|
||||
kCFURLVolumeIsAutomount = 0x2 #
|
||||
kCFURLVolumeDontBrowse = 0x4 #
|
||||
kCFURLVolumeIsReadOnly = 0x8 #
|
||||
kCFURLVolumeIsQuarantined = 0x10
|
||||
kCFURLVolumeIsEjectable = 0x20 #
|
||||
kCFURLVolumeIsRemovable = 0x40 #
|
||||
kCFURLVolumeIsInternal = 0x80 #
|
||||
kCFURLVolumeIsExternal = 0x100 #
|
||||
kCFURLVolumeIsDiskImage = 0x200 #
|
||||
kCFURLVolumeIsFileVault = 0x400
|
||||
kCFURLVolumeIsLocaliDiskMirror = 0x800
|
||||
kCFURLVolumeIsiPod = 0x1000 #
|
||||
kCFURLVolumeIsiDisk = 0x2000
|
||||
kCFURLVolumeIsCD = 0x4000
|
||||
kCFURLVolumeIsDVD = 0x8000
|
||||
kCFURLVolumeIsDeviceFileSystem = 0x10000
|
||||
kCFURLVolumeSupportsPersistentIDs = 0x100000000
|
||||
kCFURLVolumeSupportsSearchFS = 0x200000000
|
||||
kCFURLVolumeSupportsExchange = 0x400000000
|
||||
# reserved 0x800000000
|
||||
kCFURLVolumeSupportsSymbolicLinks = 0x1000000000
|
||||
kCFURLVolumeSupportsDenyModes = 0x2000000000
|
||||
kCFURLVolumeSupportsCopyFile = 0x4000000000
|
||||
kCFURLVolumeSupportsReadDirAttr = 0x8000000000
|
||||
kCFURLVolumeSupportsJournaling = 0x10000000000
|
||||
kCFURLVolumeSupportsRename = 0x20000000000
|
||||
kCFURLVolumeSupportsFastStatFS = 0x40000000000
|
||||
kCFURLVolumeSupportsCaseSensitiveNames = 0x80000000000
|
||||
kCFURLVolumeSupportsCasePreservedNames = 0x100000000000
|
||||
kCFURLVolumeSupportsFLock = 0x200000000000
|
||||
kCFURLVolumeHasNoRootDirectoryTimes = 0x400000000000
|
||||
kCFURLVolumeSupportsExtendedSecurity = 0x800000000000
|
||||
kCFURLVolumeSupports2TBFileSize = 0x1000000000000
|
||||
kCFURLVolumeSupportsHardLinks = 0x2000000000000
|
||||
kCFURLVolumeSupportsMandatoryByteRangeLocks = 0x4000000000000
|
||||
kCFURLVolumeSupportsPathFromID = 0x8000000000000
|
||||
# reserved 0x10000000000000
|
||||
kCFURLVolumeIsJournaling = 0x20000000000000
|
||||
kCFURLVolumeSupportsSparseFiles = 0x40000000000000
|
||||
kCFURLVolumeSupportsZeroRuns = 0x80000000000000
|
||||
kCFURLVolumeSupportsVolumeSizes = 0x100000000000000
|
||||
kCFURLVolumeSupportsRemoteEvents = 0x200000000000000
|
||||
kCFURLVolumeSupportsHiddenFiles = 0x400000000000000
|
||||
kCFURLVolumeSupportsDecmpFSCompression = 0x800000000000000
|
||||
kCFURLVolumeHas64BitObjectIDs = 0x1000000000000000
|
||||
kCFURLVolumePropertyFlagsAll = 0xffffffffffffffff
|
||||
|
||||
BMK_URL_ST_ABSOLUTE = 0x0001
|
||||
BMK_URL_ST_RELATIVE = 0x0002
|
||||
|
||||
# Bookmark keys
|
||||
# = 0x1003
|
||||
kBookmarkPath = 0x1004 # Array of path components
|
||||
kBookmarkCNIDPath = 0x1005 # Array of CNIDs
|
||||
kBookmarkFileProperties = 0x1010 # (CFURL rp flags,
|
||||
# CFURL rp flags asked for,
|
||||
# 8 bytes NULL)
|
||||
kBookmarkFileName = 0x1020
|
||||
kBookmarkFileID = 0x1030
|
||||
kBookmarkFileCreationDate = 0x1040
|
||||
# = 0x1054 # ?
|
||||
# = 0x1055 # ?
|
||||
# = 0x1056 # ?
|
||||
# = 0x1101 # ?
|
||||
# = 0x1102 # ?
|
||||
kBookmarkTOCPath = 0x2000 # A list of (TOC id, ?) pairs
|
||||
kBookmarkVolumePath = 0x2002
|
||||
kBookmarkVolumeURL = 0x2005
|
||||
kBookmarkVolumeName = 0x2010
|
||||
kBookmarkVolumeUUID = 0x2011 # Stored (perversely) as a string
|
||||
kBookmarkVolumeSize = 0x2012
|
||||
kBookmarkVolumeCreationDate = 0x2013
|
||||
kBookmarkVolumeProperties = 0x2020 # (CFURL vp flags,
|
||||
# CFURL vp flags asked for,
|
||||
# 8 bytes NULL)
|
||||
kBookmarkVolumeIsRoot = 0x2030 # True if volume is FS root
|
||||
kBookmarkVolumeBookmark = 0x2040 # Embedded bookmark for disk image (TOC id)
|
||||
kBookmarkVolumeMountPoint = 0x2050 # A URL
|
||||
# = 0x2070
|
||||
kBookmarkContainingFolder = 0xc001 # Index of containing folder in path
|
||||
kBookmarkUserName = 0xc011 # User that created bookmark
|
||||
kBookmarkUID = 0xc012 # UID that created bookmark
|
||||
kBookmarkWasFileReference = 0xd001 # True if the URL was a file reference
|
||||
kBookmarkCreationOptions = 0xd010
|
||||
kBookmarkURLLengths = 0xe003 # See below
|
||||
# = 0xf017 # Localized name?
|
||||
# = 0xf022
|
||||
kBookmarkSecurityExtension = 0xf080
|
||||
# = 0xf081
|
||||
|
||||
# kBookmarkURLLengths is an array that is set if the URL encoded by the
|
||||
# bookmark had a base URL; in that case, each entry is the length of the
|
||||
# base URL in question. Thus a URL
|
||||
#
|
||||
# file:///foo/bar/baz blam/blat.html
|
||||
#
|
||||
# will result in [3, 2], while the URL
|
||||
#
|
||||
# file:///foo bar/baz blam blat.html
|
||||
#
|
||||
# would result in [1, 2, 1, 1]
|
||||
|
||||
|
||||
class Data (object):
    """Wrapper marking a value as raw CFData bytes in a bookmark."""

    def __init__(self, bytedata=None):
        #: The bytes, stored as a byte string
        self.bytes = bytes(bytedata)

    def __repr__(self):
        return 'Data(%r)' % self.bytes
||||
|
||||
class URL (object):
    """A URL value, optionally expressed relative to a base URL."""

    def __init__(self, base, rel=None):
        if rel is None:
            # A single argument is an absolute URL string.
            self.base = None
            self.relative = base
        else:
            #: The base URL, if any (a :class:`URL`)
            self.base = base
            #: The rest of the URL (a string)
            self.relative = rel

    @property
    def absolute(self):
        """Return an absolute URL."""
        if self.base is None:
            return self.relative
        return urljoin(self.base.absolute, self.relative)

    def __repr__(self):
        return 'URL(%r)' % self.absolute
||||
|
||||
class Bookmark (object):
|
||||
def __init__(self, tocs=None):
    """Create a bookmark, optionally from an existing list of TOCs."""
    #: The TOCs for this Bookmark
    self.tocs = [] if tocs is None else tocs
|
||||
|
||||
@classmethod
def _get_item(cls, data, hdrsize, offset):
    """Decode one typed item from the bookmark blob *data*.

    *offset* is relative to the end of the *hdrsize*-byte header.
    Returns the decoded Python value; unknown typecodes fall through
    to a ``(typecode, databytes)`` tuple.  Raises :exc:`ValueError`
    when the item lies outside *data* or is truncated.
    """
    offset += hdrsize
    if offset > len(data) - 8:
        raise ValueError('Offset out of range')

    # Every item starts with a little-endian (length, typecode) pair.
    length,typecode = struct.unpack(b'<II', data[offset:offset+8])

    if len(data) - offset < 8 + length:
        raise ValueError('Data item truncated')

    databytes = data[offset+8:offset+8+length]

    dsubtype = typecode & BMK_DATA_SUBTYPE_MASK
    dtype = typecode & BMK_DATA_TYPE_MASK

    if dtype == BMK_STRING:
        return databytes.decode('utf-8')
    elif dtype == BMK_DATA:
        return Data(databytes)
    elif dtype == BMK_NUMBER:
        # The subtype is a CFNumberType value.
        if dsubtype == kCFNumberSInt8Type:
            # BUGFIX: was ord(databytes[0]), which raises TypeError on
            # Python 3 (indexing bytes already yields an int) and
            # treated the SInt8 as unsigned; unpack it as a signed
            # 8-bit integer, which works on both Python 2 and 3.
            return struct.unpack(b'<b', databytes[:1])[0]
        elif dsubtype == kCFNumberSInt16Type:
            return struct.unpack(b'<h', databytes)[0]
        elif dsubtype == kCFNumberSInt32Type:
            return struct.unpack(b'<i', databytes)[0]
        elif dsubtype == kCFNumberSInt64Type:
            return struct.unpack(b'<q', databytes)[0]
        elif dsubtype == kCFNumberFloat32Type:
            return struct.unpack(b'<f', databytes)[0]
        elif dsubtype == kCFNumberFloat64Type:
            return struct.unpack(b'<d', databytes)[0]
    elif dtype == BMK_DATE:
        # Yes, dates really are stored as *BIG-endian* doubles; everything
        # else is little-endian
        secs = datetime.timedelta(seconds=struct.unpack(b'>d', databytes)[0])
        return osx_epoch + secs
    elif dtype == BMK_BOOLEAN:
        if dsubtype == BMK_BOOLEAN_ST_TRUE:
            return True
        elif dsubtype == BMK_BOOLEAN_ST_FALSE:
            return False
    elif dtype == BMK_UUID:
        return uuid.UUID(bytes=databytes)
    elif dtype == BMK_URL:
        if dsubtype == BMK_URL_ST_ABSOLUTE:
            return URL(databytes.decode('utf-8'))
        elif dsubtype == BMK_URL_ST_RELATIVE:
            # The payload holds offsets of the base and relative parts.
            baseoff,reloff = struct.unpack(b'<II', databytes)
            base = cls._get_item(data, hdrsize, baseoff)
            rel = cls._get_item(data, hdrsize, reloff)
            return URL(base, rel)
    elif dtype == BMK_ARRAY:
        # Array payload is a list of 4-byte element offsets.
        result = []
        for aoff in xrange(offset+8,offset+8+length,4):
            eltoff, = struct.unpack(b'<I', data[aoff:aoff+4])
            result.append(cls._get_item(data, hdrsize, eltoff))
        return result
    elif dtype == BMK_DICT:
        # Dict payload is a list of (key offset, value offset) pairs.
        result = {}
        for eoff in xrange(offset+8,offset+8+length,8):
            keyoff,valoff = struct.unpack(b'<II', data[eoff:eoff+8])
            key = cls._get_item(data, hdrsize, keyoff)
            val = cls._get_item(data, hdrsize, valoff)
            result[key] = val
        return result
    elif dtype == BMK_NULL:
        return None

    print('Unknown data type %08x' % typecode)
    return (typecode, databytes)
|
||||
|
||||
@classmethod
def from_bytes(cls, data):
    """Create a :class:`Bookmark` given byte data.

    Validates the 'book' header, then walks the linked list of TOCs
    (tables of contents), decoding every entry via :meth:`_get_item`.
    Raises :exc:`ValueError` for malformed or truncated input.
    """

    if len(data) < 16:
        raise ValueError('Not a bookmark file (too short)')

    # struct.unpack wants bytes, not bytearray.
    if isinstance(data, bytearray):
        data = bytes(data)

    magic,size,dummy,hdrsize = struct.unpack(b'<4sIII', data[0:16])

    if magic != b'book':
        raise ValueError('Not a bookmark file (bad magic) %r' % magic)

    if hdrsize < 16:
        raise ValueError('Not a bookmark file (header size too short)')

    if hdrsize > size:
        raise ValueError('Not a bookmark file (header size too large)')

    if size != len(data):
        raise ValueError('Not a bookmark file (truncated)')

    # The first 4 bytes after the header hold the offset of the first TOC.
    tocoffset, = struct.unpack(b'<I', data[hdrsize:hdrsize+4])

    tocs = []

    # Follow the chain of TOCs; each header links to the next via
    # nexttoc (0 terminates the chain).
    while tocoffset != 0:
        tocbase = hdrsize + tocoffset
        if tocoffset > size - hdrsize \
          or size - tocbase < 20:
            raise ValueError('TOC offset out of range')

        tocsize,tocmagic,tocid,nexttoc,toccount \
            = struct.unpack(b'<IIIII',
                            data[tocbase:tocbase+20])

        # A TOC without the expected magic ends the walk (not an error).
        if tocmagic != 0xfffffffe:
            break

        # Stored size excludes the first 8 bytes of the TOC header.
        tocsize += 8

        if size - tocbase < tocsize:
            raise ValueError('TOC truncated')

        if tocsize < 12 * toccount:
            raise ValueError('TOC entries overrun TOC size')

        # Each entry is 12 bytes: (key id, item offset, reserved).
        toc = {}
        for n in xrange(0,toccount):
            ebase = tocbase + 20 + 12 * n
            eid,eoffset,edummy = struct.unpack(b'<III',
                                               data[ebase:ebase+12])

            # High bit set means the key id is itself an offset to a
            # string key rather than a numeric key.
            if eid & 0x80000000:
                eid = cls._get_item(data, hdrsize, eid & 0x7fffffff)

            toc[eid] = cls._get_item(data, hdrsize, eoffset)

        tocs.append((tocid, toc))

        tocoffset = nexttoc

    return cls(tocs)
|
||||
|
||||
def __getitem__(self, key):
    """Return the value for *key*, searching each TOC in order."""
    for _tid, toc in self.tocs:
        try:
            return toc[key]
        except KeyError:
            continue
    raise KeyError('Key not found')
|
||||
|
||||
def __setitem__(self, key, value):
    """Set *key* in the first TOC, creating TOC #1 if none exist."""
    if not self.tocs:
        self.tocs = [(1, {})]
    self.tocs[0][1][key] = value
|
||||
|
||||
def get(self, key, default=None):
    """Lookup the value for a given key, returning a default if not
    present."""
    _missing = object()
    for _tid, toc in self.tocs:
        value = toc.get(key, _missing)
        if value is not _missing:
            return value
    return default
|
||||
|
||||
@classmethod
def _encode_item(cls, item, offset):
    """Encode a single value as bookmark item data.

    *offset* is the position at which the item will be written; it is
    needed because containers (arrays, dicts, relative URLs) embed
    absolute offsets to their children.  Returns ``(next_offset,
    encoded_bytes)`` where the encoding is padded to a 4-byte boundary.
    Raises :exc:`ValueError` for unsupported types.
    """
    # True/False are checked first so they aren't caught by the int case.
    if item is True:
        result = struct.pack(b'<II', 0, BMK_BOOLEAN | BMK_BOOLEAN_ST_TRUE)
    elif item is False:
        result = struct.pack(b'<II', 0, BMK_BOOLEAN | BMK_BOOLEAN_ST_FALSE)
    elif isinstance(item, unicode):
        encoded = item.encode('utf-8')
        result = (struct.pack(b'<II', len(encoded), BMK_STRING | BMK_ST_ONE)
                  + encoded)
    elif isinstance(item, bytes):
        # NOTE(review): bytes are tagged BMK_STRING, not BMK_DATA --
        # presumably a Python 2 `str` accommodation; confirm before
        # relying on it for binary payloads (use Data for those).
        result = (struct.pack(b'<II', len(item), BMK_STRING | BMK_ST_ONE)
                  + item)
    elif isinstance(item, Data):
        result = (struct.pack(b'<II', len(item.bytes),
                              BMK_DATA | BMK_ST_ONE)
                  + bytes(item.bytes))
    elif isinstance(item, bytearray):
        result = (struct.pack(b'<II', len(item),
                              BMK_DATA | BMK_ST_ONE)
                  + bytes(item))
    elif isinstance(item, int) or isinstance(item, long):
        # Small ints fit in an SInt32; everything else gets SInt64.
        if item > -0x80000000 and item < 0x7fffffff:
            result = struct.pack(b'<IIi', 4,
                                 BMK_NUMBER | kCFNumberSInt32Type, item)
        else:
            result = struct.pack(b'<IIq', 8,
                                 BMK_NUMBER | kCFNumberSInt64Type, item)
    elif isinstance(item, float):
        result = struct.pack(b'<IId', 8,
                             BMK_NUMBER | kCFNumberFloat64Type, item)
    elif isinstance(item, datetime.datetime):
        # Dates are seconds since the OS X epoch, as a BIG-endian double
        # (everything else in the format is little-endian).
        secs = item - osx_epoch
        result = struct.pack(b'<II', 8, BMK_DATE | BMK_ST_ZERO) \
          + struct.pack(b'>d', float(secs.total_seconds()))
    elif isinstance(item, uuid.UUID):
        result = struct.pack(b'<II', 16, BMK_UUID | BMK_ST_ONE) \
          + item.bytes
    elif isinstance(item, URL):
        if item.base:
            # Relative URL: payload is the offsets of the base and
            # relative parts, which are encoded immediately after.
            baseoff = offset + 16
            reloff, baseenc = cls._encode_item(item.base, baseoff)
            xoffset, relenc = cls._encode_item(item.relative, reloff)
            result = b''.join([
                struct.pack(b'<IIII', 8, BMK_URL | BMK_URL_ST_RELATIVE,
                            baseoff, reloff),
                baseenc,
                relenc])
        else:
            encoded = item.relative.encode('utf-8')
            result = struct.pack(b'<II', len(encoded),
                                 BMK_URL | BMK_URL_ST_ABSOLUTE) + encoded
    elif isinstance(item, list):
        # Array payload is the element offsets; elements follow it.
        ioffset = offset + 8 + len(item) * 4
        result = [struct.pack(b'<II', len(item) * 4, BMK_ARRAY | BMK_ST_ONE)]
        enc = []
        for elt in item:
            result.append(struct.pack(b'<I', ioffset))
            ioffset, ienc = cls._encode_item(elt, ioffset)
            enc.append(ienc)
        result = b''.join(result + enc)
    elif isinstance(item, dict):
        # Dict payload is (key offset, value offset) pairs; the keys
        # and values themselves follow.
        ioffset = offset + 8 + len(item) * 8
        result = [struct.pack(b'<II', len(item) * 8, BMK_DICT | BMK_ST_ONE)]
        enc = []
        for k,v in iteritems(item):
            result.append(struct.pack(b'<I', ioffset))
            ioffset, ienc = cls._encode_item(k, ioffset)
            enc.append(ienc)
            result.append(struct.pack(b'<I', ioffset))
            ioffset, ienc = cls._encode_item(v, ioffset)
            enc.append(ienc)
        result = b''.join(result + enc)
    elif item is None:
        result = struct.pack(b'<II', 0, BMK_NULL | BMK_ST_ONE)
    else:
        raise ValueError('Unknown item type when encoding: %s' % item)

    offset += len(result)

    # Pad to a multiple of 4 bytes
    if offset & 3:
        extra = 4 - (offset & 3)
        result += b'\0' * extra
        offset += extra

    return (offset, result)
|
||||
|
||||
def to_bytes(self):
    """Convert this :class:`Bookmark` to a byte representation.

    Encodes every TOC's items first, then the chained TOC headers,
    then prepends the 48-byte 'book' file header.  The inverse of
    :meth:`from_bytes`.
    """

    result = []
    tocs = []
    offset = 4  # For the offset to the first TOC

    # Generate the data and build the TOCs
    for tid,toc in self.tocs:
        entries = []

        for k,v in iteritems(toc):
            if isinstance(k, (str, unicode)):
                # String keys are stored as items; the TOC entry's key
                # field holds their offset with the high bit set.
                noffset = offset
                voffset, enc = self._encode_item(k, offset)
                result.append(enc)
                offset, enc = self._encode_item(v, voffset)
                result.append(enc)
                entries.append((noffset | 0x80000000, voffset))
            else:
                entries.append((k, offset))
                offset, enc = self._encode_item(v, offset)
                result.append(enc)

        # TOC entries must be sorted - CoreServicesInternal does a
        # binary search to find data
        entries.sort()

        tocs.append((tid, b''.join([struct.pack(b'<III',k,o,0)
                                    for k,o in entries])))

    first_toc_offset = offset

    # Now generate the TOC headers
    for ndx,toc in enumerate(tocs):
        tid, data = toc
        # nexttoc of 0 terminates the chain (see from_bytes).
        if ndx == len(tocs) - 1:
            next_offset = 0
        else:
            next_offset = offset + 20 + len(data)

        # The stored size excludes the first 8 bytes of the TOC header;
        # from_bytes adds them back.
        result.append(struct.pack(b'<IIIII', len(data) - 8,
                                  0xfffffffe,
                                  tid,
                                  next_offset,
                                  len(data) // 12))
        result.append(data)

        offset += 20 + len(data)

    # Finally, add the header (and the first TOC offset, which isn't part
    # of the header, but goes just after it)
    header = struct.pack(b'<4sIIIQQQQI', b'book',
                         offset + 48,
                         0x10040000,
                         48,
                         0, 0, 0, 0, first_toc_offset)

    result.insert(0, header)

    return b''.join(result)
|
||||
|
||||
@classmethod
def for_file(cls, path):
    """Construct a :class:`Bookmark` for a given file.

    Only usable on macOS: queries the volume and file via the `osx`
    ctypes wrappers (statfs/getattrlist) and builds a single-TOC
    bookmark describing *path*.
    """

    # Find the filesystem
    st = osx.statfs(path)
    vol_path = st.f_mntonname.decode('utf-8')

    # Grab its attributes
    attrs = [osx.ATTR_CMN_CRTIME,
             osx.ATTR_VOL_SIZE
             | osx.ATTR_VOL_NAME
             | osx.ATTR_VOL_UUID,
             0, 0, 0]
    volinfo = osx.getattrlist(vol_path, attrs, 0)

    vol_crtime = volinfo[0]
    vol_size = volinfo[1]
    vol_name = volinfo[2]
    vol_uuid = volinfo[3]

    # Also grab various attributes of the file
    attrs = [(osx.ATTR_CMN_OBJTYPE
              | osx.ATTR_CMN_CRTIME
              | osx.ATTR_CMN_FILEID), 0, 0, 0, 0]
    info = osx.getattrlist(path, attrs, osx.FSOPT_NOFOLLOW)

    cnid = info[2]
    crtime = info[1]

    # Map the vnode type to CFURL resource-property flags.
    if info[0] == osx.VREG:
        flags = kCFURLResourceIsRegularFile
    elif info[0] == osx.VDIR:
        flags = kCFURLResourceIsDirectory
    elif info[0] == osx.VLNK:
        flags = kCFURLResourceIsSymbolicLink
    else:
        flags = kCFURLResourceIsRegularFile

    dirname, filename = os.path.split(path)

    # For a relative path, count the components of the current
    # directory (recorded via kBookmarkURLLengths) and absolutize.
    relcount = 0
    if not os.path.isabs(dirname):
        curdir = os.getcwd()
        head, tail = os.path.split(curdir)
        relcount = 0
        while head and tail:
            relcount += 1
            head, tail = os.path.split(head)
        dirname = os.path.join(curdir, dirname)

    foldername = os.path.basename(dirname)

    rel_path = os.path.relpath(path, vol_path)

    # Build the path arrays
    name_path = []
    cnid_path = []
    head, tail = os.path.split(rel_path)
    if not tail:
        head, tail = os.path.split(head)
    # Walk the path from leaf to root, looking up each ancestor's CNID.
    while head or tail:
        if head:
            attrs = [osx.ATTR_CMN_FILEID, 0, 0, 0, 0]
            info = osx.getattrlist(os.path.join(vol_path, head), attrs, 0)
            cnid_path.insert(0, info[0])
            head, tail = os.path.split(head)
            name_path.insert(0, tail)
        else:
            head, tail = os.path.split(head)
    name_path.append(filename)
    cnid_path.append(cnid)

    url_lengths = [relcount, len(name_path) - relcount]

    # Property triples are (flags, flags-asked-for, 8 bytes NULL).
    fileprops = Data(struct.pack(b'<QQQ', flags, 0x0f, 0))
    volprops = Data(struct.pack(b'<QQQ', 0x81 | kCFURLVolumeSupportsPersistentIDs,
                                0x13ef | kCFURLVolumeSupportsPersistentIDs, 0))

    toc = {
        kBookmarkPath: name_path,
        kBookmarkCNIDPath: cnid_path,
        kBookmarkFileCreationDate: crtime,
        kBookmarkFileProperties: fileprops,
        kBookmarkContainingFolder: len(name_path) - 2,
        kBookmarkVolumePath: vol_path,
        kBookmarkVolumeIsRoot: vol_path == '/',
        kBookmarkVolumeURL: URL('file://' + vol_path),
        kBookmarkVolumeName: vol_name,
        kBookmarkVolumeSize: vol_size,
        kBookmarkVolumeCreationDate: vol_crtime,
        kBookmarkVolumeUUID: str(vol_uuid).upper(),
        kBookmarkVolumeProperties: volprops,
        kBookmarkCreationOptions: 512,
        kBookmarkWasFileReference: True,
        kBookmarkUserName: 'unknown',
        kBookmarkUID: 99,
        }

    # URL lengths are only recorded when the path was given relative.
    if relcount:
        toc[kBookmarkURLLengths] = url_lengths

    return Bookmark([(1, toc)])
|
||||
|
||||
def __repr__(self):
    """Debug representation listing every TOC and its entries."""
    pieces = ['Bookmark([']
    for tid, toc in self.tocs:
        pieces.append('(0x%x, {\n' % tid)
        for k, v in iteritems(toc):
            if isinstance(k, (str, unicode)):
                kf = repr(k)
            else:
                kf = '0x%04x' % k
            pieces.append('  %s: %r\n' % (kf, v))
        pieces.append('}),\n')
    pieces.append('])')
    return ''.join(pieces)
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/bookmark.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/bookmark.pyc
generated
vendored
Normal file
Binary file not shown.
827
buildfiles/node_modules/dmg-builder/vendor/mac_alias/osx.py
generated
vendored
Normal file
827
buildfiles/node_modules/dmg-builder/vendor/mac_alias/osx.py
generated
vendored
Normal file
@@ -0,0 +1,827 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from ctypes import *
|
||||
import struct
|
||||
import os
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
from .utils import *
|
||||
|
||||
libc = cdll.LoadLibrary('/usr/lib/libc.dylib')
|
||||
|
||||
# Constants
|
||||
FSOPT_NOFOLLOW = 0x00000001
|
||||
FSOPT_NOINMEMUPDATE = 0x00000002
|
||||
FSOPT_REPORT_FULLSIZE = 0x00000004
|
||||
FSOPT_PACK_INVAL_ATTRS = 0x00000008
|
||||
|
||||
VOL_CAPABILITIES_FORMAT = 0
|
||||
VOL_CAPABILITIES_INTERFACES = 1
|
||||
|
||||
VOL_CAP_FMT_PERSISTENTOBJECTIDS = 0x00000001
|
||||
VOL_CAP_FMT_SYMBOLICLINKS = 0x00000002
|
||||
VOL_CAP_FMT_HARDLINKS = 0x00000004
|
||||
VOL_CAP_FMT_JOURNAL = 0x00000008
|
||||
VOL_CAP_FMT_JOURNAL_ACTIVE = 0x00000010
|
||||
VOL_CAP_FMT_NO_ROOT_TIMES = 0x00000020
|
||||
VOL_CAP_FMT_SPARSE_FILES = 0x00000040
|
||||
VOL_CAP_FMT_ZERO_RUNS = 0x00000080
|
||||
VOL_CAP_FMT_CASE_SENSITIVE = 0x00000100
|
||||
VOL_CAP_FMT_CASE_PRESERVING = 0x00000200
|
||||
VOL_CAP_FMT_FAST_STATFS = 0x00000400
|
||||
VOL_CAP_FMT_2TB_FILESIZE = 0x00000800
|
||||
VOL_CAP_FMT_OPENDENYMODES = 0x00001000
|
||||
VOL_CAP_FMT_HIDDEN_FILES = 0x00002000
|
||||
VOL_CAP_FMT_PATH_FROM_ID = 0x00004000
|
||||
VOL_CAP_FMT_NO_VOLUME_SIZES = 0x00008000
|
||||
VOL_CAP_FMT_DECMPFS_COMPRESSION = 0x00010000
|
||||
VOL_CAP_FMT_64BIT_OBJECT_IDS = 0x00020000
|
||||
|
||||
VOL_CAP_INT_SEARCHFS = 0x00000001
|
||||
VOL_CAP_INT_ATTRLIST = 0x00000002
|
||||
VOL_CAP_INT_NFSEXPORT = 0x00000004
|
||||
VOL_CAP_INT_READDIRATTR = 0x00000008
|
||||
VOL_CAP_INT_EXCHANGEDATA = 0x00000010
|
||||
VOL_CAP_INT_COPYFILE = 0x00000020
|
||||
VOL_CAP_INT_ALLOCATE = 0x00000040
|
||||
VOL_CAP_INT_VOL_RENAME = 0x00000080
|
||||
VOL_CAP_INT_ADVLOCK = 0x00000100
|
||||
VOL_CAP_INT_FLOCK = 0x00000200
|
||||
VOL_CAP_INT_EXTENDED_SECURITY = 0x00000400
|
||||
VOL_CAP_INT_USERACCESS = 0x00000800
|
||||
VOL_CAP_INT_MANLOCK = 0x00001000
|
||||
VOL_CAP_INT_NAMEDSTREAMS = 0x00002000
|
||||
VOL_CAP_INT_EXTENDED_ATTR = 0x00004000
|
||||
|
||||
ATTR_CMN_NAME = 0x00000001
|
||||
ATTR_CMN_DEVID = 0x00000002
|
||||
ATTR_CMN_FSID = 0x00000004
|
||||
ATTR_CMN_OBJTYPE = 0x00000008
|
||||
ATTR_CMN_OBJTAG = 0x00000010
|
||||
ATTR_CMN_OBJID = 0x00000020
|
||||
ATTR_CMN_OBJPERMANENTID = 0x00000040
|
||||
ATTR_CMN_PAROBJID = 0x00000080
|
||||
ATTR_CMN_SCRIPT = 0x00000100
|
||||
ATTR_CMN_CRTIME = 0x00000200
|
||||
ATTR_CMN_MODTIME = 0x00000400
|
||||
ATTR_CMN_CHGTIME = 0x00000800
|
||||
ATTR_CMN_ACCTIME = 0x00001000
|
||||
ATTR_CMN_BKUPTIME = 0x00002000
|
||||
ATTR_CMN_FNDRINFO = 0x00004000
|
||||
ATTR_CMN_OWNERID = 0x00008000
|
||||
ATTR_CMN_GRPID = 0x00010000
|
||||
ATTR_CMN_ACCESSMASK = 0x00020000
|
||||
ATTR_CMN_FLAGS = 0x00040000
|
||||
ATTR_CMN_USERACCESS = 0x00200000
|
||||
ATTR_CMN_EXTENDED_SECURITY = 0x00400000
|
||||
ATTR_CMN_UUID = 0x00800000
|
||||
ATTR_CMN_GRPUUID = 0x01000000
|
||||
ATTR_CMN_FILEID = 0x02000000
|
||||
ATTR_CMN_PARENTID = 0x04000000
|
||||
ATTR_CMN_FULLPATH = 0x08000000
|
||||
ATTR_CMN_ADDEDTIME = 0x10000000
|
||||
ATTR_CMN_RETURNED_ATTRS = 0x80000000
|
||||
ATTR_CMN_ALL_ATTRS = 0x9fe7ffff
|
||||
|
||||
ATTR_VOL_FSTYPE = 0x00000001
|
||||
ATTR_VOL_SIGNATURE = 0x00000002
|
||||
ATTR_VOL_SIZE = 0x00000004
|
||||
ATTR_VOL_SPACEFREE = 0x00000008
|
||||
ATTR_VOL_SPACEAVAIL = 0x00000010
|
||||
ATTR_VOL_MINALLOCATION = 0x00000020
|
||||
ATTR_VOL_ALLOCATIONCLUMP = 0x00000040
|
||||
ATTR_VOL_IOBLOCKSIZE = 0x00000080
|
||||
ATTR_VOL_OBJCOUNT = 0x00000100
|
||||
ATTR_VOL_FILECOUNT = 0x00000200
|
||||
ATTR_VOL_DIRCOUNT = 0x00000400
|
||||
ATTR_VOL_MAXOBJCOUNT = 0x00000800
|
||||
ATTR_VOL_MOUNTPOINT = 0x00001000
|
||||
ATTR_VOL_NAME = 0x00002000
|
||||
ATTR_VOL_MOUNTFLAGS = 0x00004000
|
||||
ATTR_VOL_MOUNTEDDEVICE = 0x00008000
|
||||
ATTR_VOL_ENCODINGSUSED = 0x00010000
|
||||
ATTR_VOL_CAPABILITIES = 0x00020000
|
||||
ATTR_VOL_UUID = 0x00040000
|
||||
ATTR_VOL_ATTRIBUTES = 0x40000000
|
||||
ATTR_VOL_INFO = 0x80000000
|
||||
ATTR_VOL_ALL_ATTRS = 0xc007ffff
|
||||
|
||||
ATTR_DIR_LINKCOUNT = 0x00000001
|
||||
ATTR_DIR_ENTRYCOUNT = 0x00000002
|
||||
ATTR_DIR_MOUNTSTATUS = 0x00000004
|
||||
DIR_MNTSTATUS_MNTPOINT = 0x00000001
|
||||
DIR_MNTSTATUS_TRIGGER = 0x00000002
|
||||
ATTR_DIR_ALL_ATTRS = 0x00000007
|
||||
|
||||
ATTR_FILE_LINKCOUNT = 0x00000001
|
||||
ATTR_FILE_TOTALSIZE = 0x00000002
|
||||
ATTR_FILE_ALLOCSIZE = 0x00000004
|
||||
ATTR_FILE_IOBLOCKSIZE = 0x00000008
|
||||
ATTR_FILE_DEVTYPE = 0x00000020
|
||||
ATTR_FILE_DATALENGTH = 0x00000200
|
||||
ATTR_FILE_DATAALLOCSIZE = 0x00000400
|
||||
ATTR_FILE_RSRCLENGTH = 0x00001000
|
||||
ATTR_FILE_RSRCALLOCSIZE = 0x00002000
|
||||
|
||||
ATTR_FILE_ALL_ATTRS = 0x0000362f
|
||||
|
||||
ATTR_FORK_TOTALSIZE = 0x00000001
|
||||
ATTR_FORK_ALLOCSIZE = 0x00000002
|
||||
ATTR_FORK_ALL_ATTRS = 0x00000003
|
||||
|
||||
# These can't be used
|
||||
ATTR_FILE_FORKCOUNT = 0x00000080
|
||||
ATTR_FILE_FORKLIST = 0x00000100
|
||||
ATTR_CMN_NAMEDATTRCOUNT = 0x00080000
|
||||
ATTR_CMN_NAMEDATTRLIST = 0x00100000
|
||||
ATTR_FILE_DATAEXTENTS = 0x00000800
|
||||
ATTR_FILE_RSRCEXTENTS = 0x00004000
|
||||
ATTR_FILE_CLUMPSIZE = 0x00000010
|
||||
ATTR_FILE_FILETYPE = 0x00000040
|
||||
|
||||
class attrlist(Structure):
    # struct attrlist from <sys/attr.h>: tells getattrlist(2)/fgetattrlist(2)
    # which attribute groups the caller wants (bitmapcount is always 5 here).
    _fields_ = [('bitmapcount', c_ushort),
                ('reserved', c_ushort),
                ('commonattr', c_uint),   # ATTR_CMN_* bits
                ('volattr', c_uint),      # ATTR_VOL_* bits
                ('dirattr', c_uint),      # ATTR_DIR_* bits
                ('fileattr', c_uint),     # ATTR_FILE_* bits
                ('forkattr', c_uint)]     # ATTR_FORK_* bits
|
||||
|
||||
class attribute_set_t(Structure):
    # Bit sets the kernel reports back for ATTR_CMN_RETURNED_ATTRS: one
    # mask per attribute group, same layout as attrlist minus the header.
    _fields_ = [('commonattr', c_uint),
                ('volattr', c_uint),
                ('dirattr', c_uint),
                ('fileattr', c_uint),
                ('forkattr', c_uint)]
|
||||
|
||||
class fsobj_id_t(Structure):
    # Filesystem object id: object number plus generation counter.
    _fields_ = [('fid_objno', c_uint),
                ('fid_generation', c_uint)]
|
||||
|
||||
class timespec(Structure):
    # struct timespec: seconds + nanoseconds (converted to datetime by
    # _datetime_from_timespec below).
    _fields_ = [('tv_sec', c_long),
                ('tv_nsec', c_long)]
|
||||
|
||||
class attrreference_t(Structure):
    # Header for variable-length attributes: the data lives attr_dataoffset
    # bytes past this header and is attr_length bytes long.
    _fields_ = [('attr_dataoffset', c_int),
                ('attr_length', c_uint)]
|
||||
|
||||
class fsid_t(Structure):
    # Filesystem id: two 32-bit words, as in <sys/mount.h>.
    _fields_ = [('val', c_uint * 2)]
|
||||
|
||||
class guid_t(Structure):
    # 16-byte GUID as used by the kauth subsystem (owners, groups, ACEs).
    _fields_ = [('g_guid', c_byte*16)]
|
||||
|
||||
class kauth_ace(Structure):
    # One ACL entry: the GUID it applies to plus its flag bits.
    _fields_ = [('ace_applicable', guid_t),
                ('ace_flags', c_uint)]
|
||||
|
||||
class kauth_acl(Structure):
    # ACL header followed by its entries.  The 128-entry array here is a
    # maximum-size template; the decoder rebuilds this type with the actual
    # entry count read from the buffer.
    _fields_ = [('acl_entrycount', c_uint),
                ('acl_flags', c_uint),
                ('acl_ace', kauth_ace * 128)]
|
||||
|
||||
class kauth_filesec(Structure):
    # Extended security blob (ATTR_CMN_EXTENDED_SECURITY): magic, owner and
    # group GUIDs, then the variable-length ACL.
    _fields_ = [('fsec_magic', c_uint),
                ('fsec_owner', guid_t),
                ('fsec_group', guid_t),
                ('fsec_acl', kauth_acl)]
|
||||
|
||||
class diskextent(Structure):
    # A contiguous run of disk blocks (start + length), 8 of which make an
    # extentrecord for the *EXTENTS file attributes.
    _fields_ = [('startblock', c_uint),
                ('blockcount', c_uint)]
|
||||
|
||||
OSType = c_uint
|
||||
UInt16 = c_ushort
|
||||
SInt16 = c_short
|
||||
SInt32 = c_int
|
||||
|
||||
class Point(Structure):
    # 16-bit 2-D coordinate used inside the Finder info structures.
    _fields_ = [('x', SInt16),
                ('y', SInt16)]
|
||||
class Rect(Structure):
    # 16-bit rectangle (origin + size) used for Finder window bounds.
    _fields_ = [('x', SInt16),
                ('y', SInt16),
                ('w', SInt16),
                ('h', SInt16)]
|
||||
class FileInfo(Structure):
    # Finder information for a file (type/creator codes, flags, icon
    # location) as packed into ATTR_CMN_FNDRINFO.
    _fields_ = [('fileType', OSType),
                ('fileCreator', OSType),
                ('finderFlags', UInt16),
                ('location', Point),
                ('reservedField', UInt16),
                ('reserved1', SInt16 * 4),
                ('extendedFinderFlags', UInt16),
                ('reserved2', SInt16),
                ('putAwayFolderID', SInt32)]
|
||||
class FolderInfo(Structure):
    # Finder information for a folder (window bounds, flags, scroll
    # position) as packed into ATTR_CMN_FNDRINFO.
    _fields_ = [('windowBounds', Rect),
                ('finderFlags', UInt16),
                ('location', Point),
                ('reservedField', UInt16),
                ('scrollPosition', Point),
                ('reserved1', SInt32),
                ('extendedFinderFlags', UInt16),
                ('reserved2', SInt16),
                ('putAwayFolderID', SInt32)]
|
||||
class FinderInfo(Union):
    # ATTR_CMN_FNDRINFO payload: interpreted as FileInfo or FolderInfo
    # depending on the object type.
    _fields_ = [('fileInfo', FileInfo),
                ('folderInfo', FolderInfo)]
|
||||
|
||||
extentrecord = diskextent * 8
|
||||
|
||||
vol_capabilities_set_t = c_uint * 4
|
||||
|
||||
class vol_capabilities_attr_t(Structure):
    # ATTR_VOL_CAPABILITIES payload: the capability bits plus a mask of
    # which bits are valid.
    _fields_ = [('capabilities', vol_capabilities_set_t),
                ('valid', vol_capabilities_set_t)]
|
||||
|
||||
class vol_attributes_attr_t(Structure):
    # ATTR_VOL_ATTRIBUTES payload: which attributes the volume supports
    # and which it supports natively.
    _fields_ = [('validattr', attribute_set_t),
                ('nativeattr', attribute_set_t)]
|
||||
|
||||
dev_t = c_uint
|
||||
|
||||
fsobj_type_t = c_uint
|
||||
|
||||
VNON = 0
|
||||
VREG = 1
|
||||
VDIR = 2
|
||||
VBLK = 3
|
||||
VCHR = 4
|
||||
VLNK = 5
|
||||
VSOCK = 6
|
||||
VFIFO = 7
|
||||
VBAD = 8
|
||||
VSTR = 9
|
||||
VCPLX = 10
|
||||
|
||||
fsobj_tag_t = c_uint
|
||||
|
||||
VT_NON = 0
|
||||
VT_UFS = 1
|
||||
VT_NFS = 2
|
||||
VT_MFS = 3
|
||||
VT_MSDOSFS = 4
|
||||
VT_LFS = 5
|
||||
VT_LOFS = 6
|
||||
VT_FDESC = 7
|
||||
VT_PORTAL = 8
|
||||
VT_NULL = 9
|
||||
VT_UMAP = 10
|
||||
VT_KERNFS = 11
|
||||
VT_PROCFS = 12
|
||||
VT_AFS = 13
|
||||
VT_ISOFS = 14
|
||||
VT_UNION = 15
|
||||
VT_HFS = 16
|
||||
VT_ZFS = 17
|
||||
VT_DEVFS = 18
|
||||
VT_WEBDAV = 19
|
||||
VT_UDF = 20
|
||||
VT_AFP = 21
|
||||
VT_CDDA = 22
|
||||
VT_CIFS = 23
|
||||
VT_OTHER = 24
|
||||
|
||||
fsfile_type_t = c_uint
|
||||
fsvolid_t = c_uint
|
||||
text_encoding_t = c_uint
|
||||
uid_t = c_uint
|
||||
gid_t = c_uint
|
||||
int32_t = c_int
|
||||
uint32_t = c_uint
|
||||
int64_t = c_longlong
|
||||
uint64_t = c_ulonglong
|
||||
off_t = c_long
|
||||
size_t = c_ulong
|
||||
uuid_t = c_byte*16
|
||||
|
||||
NAME_MAX = 255
|
||||
PATH_MAX = 1024
|
||||
|
||||
class struct_statfs(Structure):
    # 64-bit-inode struct statfs layout, matching the statfs$INODE64 /
    # fstatfs$INODE64 entry points bound below.
    _fields_ = [('f_bsize', uint32_t),        # fundamental block size
                ('f_iosize', int32_t),        # optimal transfer size
                ('f_blocks', uint64_t),       # total blocks
                ('f_bfree', uint64_t),        # free blocks
                ('f_bavail', uint64_t),       # free blocks for non-root
                ('f_files', uint64_t),        # total file nodes
                ('f_ffree', uint64_t),        # free file nodes
                ('f_fsid', fsid_t),
                ('f_owner', uid_t),           # mounting user
                ('f_type', uint32_t),
                ('f_flags', uint32_t),
                ('f_fssubtype', uint32_t),
                ('f_fstypename', c_char * 16),
                ('f_mntonname', c_char * PATH_MAX),
                ('f_mntfromname', c_char * PATH_MAX),
                ('f_reserved', uint32_t * 8)]
|
||||
|
||||
# Calculate the maximum number of bytes required for the attribute buffer
|
||||
_attr_info = (
|
||||
# Common attributes
|
||||
(0, ATTR_CMN_RETURNED_ATTRS, sizeof(attribute_set_t)),
|
||||
(0, ATTR_CMN_NAME, sizeof(attrreference_t) + NAME_MAX * 3 + 1),
|
||||
(0, ATTR_CMN_DEVID, sizeof(dev_t)),
|
||||
(0, ATTR_CMN_FSID, sizeof(fsid_t)),
|
||||
(0, ATTR_CMN_OBJTYPE, sizeof(fsobj_type_t)),
|
||||
(0, ATTR_CMN_OBJTAG, sizeof(fsobj_tag_t)),
|
||||
(0, ATTR_CMN_OBJPERMANENTID, sizeof(fsobj_id_t)),
|
||||
(0, ATTR_CMN_PAROBJID, sizeof(fsobj_id_t)),
|
||||
(0, ATTR_CMN_SCRIPT, sizeof(text_encoding_t)),
|
||||
(0, ATTR_CMN_CRTIME, sizeof(timespec)),
|
||||
(0, ATTR_CMN_MODTIME, sizeof(timespec)),
|
||||
(0, ATTR_CMN_CHGTIME, sizeof(timespec)),
|
||||
(0, ATTR_CMN_ACCTIME, sizeof(timespec)),
|
||||
(0, ATTR_CMN_BKUPTIME, sizeof(timespec)),
|
||||
(0, ATTR_CMN_FNDRINFO, sizeof(FinderInfo)),
|
||||
(0, ATTR_CMN_OWNERID, sizeof(uid_t)),
|
||||
(0, ATTR_CMN_GRPID, sizeof(gid_t)),
|
||||
(0, ATTR_CMN_ACCESSMASK, sizeof(uint32_t)),
|
||||
(0, ATTR_CMN_NAMEDATTRCOUNT, None),
|
||||
(0, ATTR_CMN_NAMEDATTRLIST, None),
|
||||
(0, ATTR_CMN_FLAGS, sizeof(uint32_t)),
|
||||
(0, ATTR_CMN_USERACCESS, sizeof(uint32_t)),
|
||||
(0, ATTR_CMN_EXTENDED_SECURITY, sizeof(attrreference_t) + sizeof(kauth_filesec)),
|
||||
(0, ATTR_CMN_UUID, sizeof(guid_t)),
|
||||
(0, ATTR_CMN_GRPUUID, sizeof(guid_t)),
|
||||
(0, ATTR_CMN_FILEID, sizeof(uint64_t)),
|
||||
(0, ATTR_CMN_PARENTID, sizeof(uint64_t)),
|
||||
(0, ATTR_CMN_FULLPATH, sizeof(attrreference_t) + PATH_MAX),
|
||||
(0, ATTR_CMN_ADDEDTIME, sizeof(timespec)),
|
||||
|
||||
# Volume attributes
|
||||
(1, ATTR_VOL_FSTYPE, sizeof(uint32_t)),
|
||||
(1, ATTR_VOL_SIGNATURE, sizeof(uint32_t)),
|
||||
(1, ATTR_VOL_SIZE, sizeof(off_t)),
|
||||
(1, ATTR_VOL_SPACEFREE, sizeof(off_t)),
|
||||
(1, ATTR_VOL_SPACEAVAIL, sizeof(off_t)),
|
||||
(1, ATTR_VOL_MINALLOCATION, sizeof(off_t)),
|
||||
(1, ATTR_VOL_ALLOCATIONCLUMP, sizeof(off_t)),
|
||||
(1, ATTR_VOL_IOBLOCKSIZE, sizeof(uint32_t)),
|
||||
(1, ATTR_VOL_OBJCOUNT, sizeof(uint32_t)),
|
||||
(1, ATTR_VOL_FILECOUNT, sizeof(uint32_t)),
|
||||
(1, ATTR_VOL_DIRCOUNT, sizeof(uint32_t)),
|
||||
(1, ATTR_VOL_MAXOBJCOUNT, sizeof(uint32_t)),
|
||||
(1, ATTR_VOL_MOUNTPOINT, sizeof(attrreference_t) + PATH_MAX),
|
||||
(1, ATTR_VOL_NAME, sizeof(attrreference_t) + NAME_MAX + 1),
|
||||
(1, ATTR_VOL_MOUNTFLAGS, sizeof(uint32_t)),
|
||||
(1, ATTR_VOL_MOUNTEDDEVICE, sizeof(attrreference_t) + PATH_MAX),
|
||||
(1, ATTR_VOL_ENCODINGSUSED, sizeof(c_ulonglong)),
|
||||
(1, ATTR_VOL_CAPABILITIES, sizeof(vol_capabilities_attr_t)),
|
||||
(1, ATTR_VOL_UUID, sizeof(uuid_t)),
|
||||
(1, ATTR_VOL_ATTRIBUTES, sizeof(vol_attributes_attr_t)),
|
||||
|
||||
# Directory attributes
|
||||
(2, ATTR_DIR_LINKCOUNT, sizeof(uint32_t)),
|
||||
(2, ATTR_DIR_ENTRYCOUNT, sizeof(uint32_t)),
|
||||
(2, ATTR_DIR_MOUNTSTATUS, sizeof(uint32_t)),
|
||||
|
||||
# File attributes
|
||||
(3, ATTR_FILE_LINKCOUNT, sizeof(uint32_t)),
|
||||
(3, ATTR_FILE_TOTALSIZE, sizeof(off_t)),
|
||||
(3, ATTR_FILE_ALLOCSIZE, sizeof(off_t)),
|
||||
(3, ATTR_FILE_IOBLOCKSIZE, sizeof(uint32_t)),
|
||||
(3, ATTR_FILE_CLUMPSIZE, sizeof(uint32_t)),
|
||||
(3, ATTR_FILE_DEVTYPE, sizeof(uint32_t)),
|
||||
(3, ATTR_FILE_FILETYPE, sizeof(uint32_t)),
|
||||
(3, ATTR_FILE_FORKCOUNT, sizeof(uint32_t)),
|
||||
(3, ATTR_FILE_FORKLIST, None),
|
||||
(3, ATTR_FILE_DATALENGTH, sizeof(off_t)),
|
||||
(3, ATTR_FILE_DATAALLOCSIZE, sizeof(off_t)),
|
||||
(3, ATTR_FILE_DATAEXTENTS, sizeof(extentrecord)),
|
||||
(3, ATTR_FILE_RSRCLENGTH, sizeof(off_t)),
|
||||
(3, ATTR_FILE_RSRCALLOCSIZE, sizeof(off_t)),
|
||||
(3, ATTR_FILE_RSRCEXTENTS, sizeof(extentrecord)),
|
||||
|
||||
# Fork attributes
|
||||
(4, ATTR_FORK_TOTALSIZE, sizeof(off_t)),
|
||||
(4, ATTR_FORK_ALLOCSIZE, sizeof(off_t))
|
||||
)
|
||||
|
||||
def _attrbuf_size(attrs):
    """Return an upper bound, in bytes, on the buffer needed to hold the
    attributes selected by *attrs* (one bit mask per attribute group).

    Raises ValueError for attributes getattrlist(2) cannot return.
    """
    total = 4  # the leading uint32_t total-length field
    for group, bit, max_size in _attr_info:
        if not (attrs[group] & bit):
            continue
        if max_size is None:
            raise ValueError('Unsupported attribute (%u, %x)'
                             % (group, bit))
        total += max_size
    return total
|
||||
|
||||
# C prototypes for the libc entry points used below.
_getattrlist = libc.getattrlist
_getattrlist.argtypes = [c_char_p, POINTER(attrlist), c_void_p, c_ulong, c_ulong]
_getattrlist.restype = c_int

_fgetattrlist = libc.fgetattrlist
_fgetattrlist.argtypes = [c_int, POINTER(attrlist), c_void_p, c_ulong, c_ulong]
_fgetattrlist.restype = c_int

# The $INODE64 variants use the 64-bit-inode struct_statfs layout above.
_statfs = libc['statfs$INODE64']
_statfs.argtypes = [c_char_p, POINTER(struct_statfs)]
_statfs.restype = c_int

_fstatfs = libc['fstatfs$INODE64']
_fstatfs.argtypes = [c_int, POINTER(struct_statfs)]
_fstatfs.restype = c_int
|
||||
|
||||
def _datetime_from_timespec(ts):
    """Convert a ``timespec`` into a timezone-aware UTC ``datetime``."""
    seconds = ts.tv_sec + 1.0e-9 * ts.tv_nsec
    return unix_epoch + datetime.timedelta(seconds=seconds)
|
||||
|
||||
def _decode_utf8_nul(sz):
|
||||
nul = sz.find(b'\0')
|
||||
if nul > -1:
|
||||
sz = sz[:nul]
|
||||
return sz.decode('utf-8')
|
||||
|
||||
def _decode_attrlist_result(buf, attrs, options):
    """Unpack the buffer filled in by getattrlist(2)/fgetattrlist(2).

    *buf* is the raw attribute buffer, *attrs* the five requested bit
    masks (common, volume, directory, file, fork) and *options* the
    FSOPT_* flags passed to the call.  Returns a list with one entry per
    set attribute bit, in the fixed order the kernel packs them.

    Bug fix vs. the original: the ATTR_FILE_DATAEXTENTS and
    ATTR_FILE_RSRCEXTENTS branches used ``a.value`` on an ``extentrecord``
    — a ctypes *array*, which has no ``.value`` — and therefore raised
    AttributeError; the array itself is appended now.
    """
    result = []

    assert len(buf) >= 4
    total_size = uint32_t.from_buffer(buf, 0).value
    assert total_size <= len(buf)

    # Cursor into buf.  A one-element list so the closures below can
    # advance it (this file still supports Python 2, so no ``nonlocal``).
    pos = [4]

    def _take_struct(ctype):
        # Map a ctypes structure/array over the buffer at the cursor
        # and advance past it.
        a = ctype.from_buffer(buf, pos[0])
        pos[0] += sizeof(ctype)
        return a

    def _take_scalar(ctype):
        # Fixed-size scalar attribute, returned as a plain Python value.
        return _take_struct(ctype).value

    def _take_string():
        # Variable-length attribute: an attrreference_t header pointing
        # at NUL-terminated UTF-8 data elsewhere in the buffer.
        ref = attrreference_t.from_buffer(buf, pos[0])
        start = pos[0] + ref.attr_dataoffset
        pos[0] += sizeof(attrreference_t)
        return _decode_utf8_nul(buf[start:start + ref.attr_length])

    def _take_time():
        # timespec attribute, converted to an aware UTC datetime.
        return _datetime_from_timespec(_take_struct(timespec))

    def _take_uuid(ctype):
        # 16-byte UUID attribute (guid_t or uuid_t).
        value = uuid.UUID(bytes=buf[pos[0]:pos[0] + 16])
        pos[0] += sizeof(ctype)
        return value

    # Common attributes
    if attrs[0] & ATTR_CMN_RETURNED_ATTRS:
        a = _take_struct(attribute_set_t)
        result.append(a)
        if not (options & FSOPT_PACK_INVAL_ATTRS):
            # The kernel only packed the attributes it reports back,
            # so decode those rather than the ones we asked for.
            attrs = [a.commonattr, a.volattr, a.dirattr, a.fileattr,
                     a.forkattr]
    if attrs[0] & ATTR_CMN_NAME:
        result.append(_take_string())
    if attrs[0] & ATTR_CMN_DEVID:
        result.append(_take_scalar(dev_t))
    if attrs[0] & ATTR_CMN_FSID:
        result.append(_take_struct(fsid_t))
    if attrs[0] & ATTR_CMN_OBJTYPE:
        result.append(_take_scalar(fsobj_type_t))
    if attrs[0] & ATTR_CMN_OBJTAG:
        result.append(_take_scalar(fsobj_tag_t))
    if attrs[0] & ATTR_CMN_OBJID:
        result.append(_take_struct(fsobj_id_t))
    if attrs[0] & ATTR_CMN_OBJPERMANENTID:
        result.append(_take_struct(fsobj_id_t))
    if attrs[0] & ATTR_CMN_PAROBJID:
        result.append(_take_struct(fsobj_id_t))
    if attrs[0] & ATTR_CMN_SCRIPT:
        result.append(_take_scalar(text_encoding_t))
    if attrs[0] & ATTR_CMN_CRTIME:
        result.append(_take_time())
    if attrs[0] & ATTR_CMN_MODTIME:
        result.append(_take_time())
    if attrs[0] & ATTR_CMN_CHGTIME:
        result.append(_take_time())
    if attrs[0] & ATTR_CMN_ACCTIME:
        result.append(_take_time())
    if attrs[0] & ATTR_CMN_BKUPTIME:
        result.append(_take_time())
    if attrs[0] & ATTR_CMN_FNDRINFO:
        result.append(_take_struct(FinderInfo))
    if attrs[0] & ATTR_CMN_OWNERID:
        result.append(_take_scalar(uid_t))
    if attrs[0] & ATTR_CMN_GRPID:
        result.append(_take_scalar(gid_t))
    if attrs[0] & ATTR_CMN_ACCESSMASK:
        result.append(_take_scalar(uint32_t))
    if attrs[0] & ATTR_CMN_FLAGS:
        result.append(_take_scalar(uint32_t))
    if attrs[0] & ATTR_CMN_USERACCESS:
        result.append(_take_scalar(uint32_t))
    if attrs[0] & ATTR_CMN_EXTENDED_SECURITY:
        ref = attrreference_t.from_buffer(buf, pos[0])
        start = pos[0] + ref.attr_dataoffset
        pos[0] += sizeof(attrreference_t)
        # The ACL entry array is variable length, so rebuild the
        # kauth_filesec type for the actual entry count.  fsec_magic plus
        # the two GUIDs occupy the 36 bytes before acl_entrycount.
        ec = uint32_t.from_buffer(buf, start + 36).value
        class kauth_acl(Structure):
            _fields_ = [('acl_entrycount', c_uint),
                        ('acl_flags', c_uint),
                        ('acl_ace', kauth_ace * ec)]
        class kauth_filesec(Structure):
            _fields_ = [('fsec_magic', c_uint),
                        ('fsec_owner', guid_t),
                        ('fsec_group', guid_t),
                        ('fsec_acl', kauth_acl)]
        result.append(kauth_filesec.from_buffer(buf, start))
    if attrs[0] & ATTR_CMN_UUID:
        result.append(_take_uuid(guid_t))
    if attrs[0] & ATTR_CMN_GRPUUID:
        result.append(_take_uuid(guid_t))
    if attrs[0] & ATTR_CMN_FILEID:
        result.append(_take_scalar(uint64_t))
    if attrs[0] & ATTR_CMN_PARENTID:
        result.append(_take_scalar(uint64_t))
    if attrs[0] & ATTR_CMN_FULLPATH:
        result.append(_take_string())
    if attrs[0] & ATTR_CMN_ADDEDTIME:
        result.append(_take_time())

    # Volume attributes
    if attrs[1] & ATTR_VOL_FSTYPE:
        result.append(_take_scalar(uint32_t))
    if attrs[1] & ATTR_VOL_SIGNATURE:
        result.append(_take_scalar(uint32_t))
    if attrs[1] & ATTR_VOL_SIZE:
        result.append(_take_scalar(off_t))
    if attrs[1] & ATTR_VOL_SPACEFREE:
        result.append(_take_scalar(off_t))
    if attrs[1] & ATTR_VOL_SPACEAVAIL:
        result.append(_take_scalar(off_t))
    if attrs[1] & ATTR_VOL_MINALLOCATION:
        result.append(_take_scalar(off_t))
    if attrs[1] & ATTR_VOL_ALLOCATIONCLUMP:
        result.append(_take_scalar(off_t))
    if attrs[1] & ATTR_VOL_IOBLOCKSIZE:
        result.append(_take_scalar(uint32_t))
    if attrs[1] & ATTR_VOL_OBJCOUNT:
        result.append(_take_scalar(uint32_t))
    if attrs[1] & ATTR_VOL_FILECOUNT:
        result.append(_take_scalar(uint32_t))
    if attrs[1] & ATTR_VOL_DIRCOUNT:
        result.append(_take_scalar(uint32_t))
    if attrs[1] & ATTR_VOL_MAXOBJCOUNT:
        result.append(_take_scalar(uint32_t))
    if attrs[1] & ATTR_VOL_MOUNTPOINT:
        result.append(_take_string())
    if attrs[1] & ATTR_VOL_NAME:
        result.append(_take_string())
    if attrs[1] & ATTR_VOL_MOUNTFLAGS:
        result.append(_take_scalar(uint32_t))
    if attrs[1] & ATTR_VOL_MOUNTEDDEVICE:
        result.append(_take_string())
    if attrs[1] & ATTR_VOL_ENCODINGSUSED:
        result.append(_take_scalar(c_ulonglong))
    if attrs[1] & ATTR_VOL_CAPABILITIES:
        result.append(_take_struct(vol_capabilities_attr_t))
    if attrs[1] & ATTR_VOL_UUID:
        result.append(_take_uuid(uuid_t))
    if attrs[1] & ATTR_VOL_ATTRIBUTES:
        result.append(_take_struct(vol_attributes_attr_t))

    # Directory attributes
    if attrs[2] & ATTR_DIR_LINKCOUNT:
        result.append(_take_scalar(uint32_t))
    if attrs[2] & ATTR_DIR_ENTRYCOUNT:
        result.append(_take_scalar(uint32_t))
    if attrs[2] & ATTR_DIR_MOUNTSTATUS:
        result.append(_take_scalar(uint32_t))

    # File attributes
    if attrs[3] & ATTR_FILE_LINKCOUNT:
        result.append(_take_scalar(uint32_t))
    if attrs[3] & ATTR_FILE_TOTALSIZE:
        result.append(_take_scalar(off_t))
    if attrs[3] & ATTR_FILE_ALLOCSIZE:
        result.append(_take_scalar(off_t))
    if attrs[3] & ATTR_FILE_IOBLOCKSIZE:
        result.append(_take_scalar(uint32_t))
    if attrs[3] & ATTR_FILE_CLUMPSIZE:
        result.append(_take_scalar(uint32_t))
    if attrs[3] & ATTR_FILE_DEVTYPE:
        result.append(_take_scalar(uint32_t))
    if attrs[3] & ATTR_FILE_FILETYPE:
        result.append(_take_scalar(uint32_t))
    if attrs[3] & ATTR_FILE_FORKCOUNT:
        result.append(_take_scalar(uint32_t))
    if attrs[3] & ATTR_FILE_DATALENGTH:
        result.append(_take_scalar(off_t))
    if attrs[3] & ATTR_FILE_DATAALLOCSIZE:
        result.append(_take_scalar(off_t))
    if attrs[3] & ATTR_FILE_DATAEXTENTS:
        # extentrecord is an array type: append the mapped array itself
        # (the original's ``.value`` raised AttributeError here).
        result.append(_take_struct(extentrecord))
    if attrs[3] & ATTR_FILE_RSRCLENGTH:
        result.append(_take_scalar(off_t))
    if attrs[3] & ATTR_FILE_RSRCALLOCSIZE:
        result.append(_take_scalar(off_t))
    if attrs[3] & ATTR_FILE_RSRCEXTENTS:
        # Same fix as ATTR_FILE_DATAEXTENTS above.
        result.append(_take_struct(extentrecord))

    # Fork attributes
    if attrs[4] & ATTR_FORK_TOTALSIZE:
        result.append(_take_scalar(off_t))
    if attrs[4] & ATTR_FORK_ALLOCSIZE:
        result.append(_take_scalar(off_t))

    return result
|
||||
|
||||
# Sadly, ctypes.get_errno() seems not to work
# __error() returns the address of the calling thread's errno variable.
__error = libc.__error
__error.restype = POINTER(c_int)
|
||||
|
||||
def _get_errno():
    """Read the calling thread's errno via libc's __error()."""
    return __error()[0]
|
||||
|
||||
def getattrlist(path, attrs, options):
    """Invoke getattrlist(2) on *path* and return the decoded attributes.

    *attrs* is a five-element sequence of bit masks (common, volume,
    directory, file, fork); *options* is a combination of FSOPT_* flags.
    Raises OSError when the system call fails.
    """
    if not isinstance(path, bytes):
        path = path.encode('utf-8')

    requested = list(attrs)
    if requested[1]:
        # Any volume attribute request must also set ATTR_VOL_INFO.
        requested[1] |= ATTR_VOL_INFO

    # attrlist fields are (bitmapcount, reserved, common, vol, dir,
    # file, fork) — fill them positionally from the request masks.
    alist = attrlist(5, 0, *requested)

    bufsize = _attrbuf_size(requested)
    buf = create_string_buffer(bufsize)

    ret = _getattrlist(path, byref(alist), buf, bufsize,
                       options | FSOPT_REPORT_FULLSIZE)
    if ret < 0:
        err = _get_errno()
        raise OSError(err, os.strerror(err), path)

    return _decode_attrlist_result(buf, requested, options)
|
||||
|
||||
def fgetattrlist(fd, attrs, options):
    """Invoke fgetattrlist(2) on *fd* and return the decoded attributes.

    *fd* may be a raw descriptor or any object with a fileno() method.
    *attrs* is a five-element sequence of bit masks; *options* is a
    combination of FSOPT_* flags.  Raises OSError on failure.
    """
    if hasattr(fd, 'fileno'):
        fd = fd.fileno()

    requested = list(attrs)
    if requested[1]:
        # Any volume attribute request must also set ATTR_VOL_INFO.
        requested[1] |= ATTR_VOL_INFO

    # attrlist fields are (bitmapcount, reserved, common, vol, dir,
    # file, fork) — fill them positionally from the request masks.
    alist = attrlist(5, 0, *requested)

    bufsize = _attrbuf_size(requested)
    buf = create_string_buffer(bufsize)

    ret = _fgetattrlist(fd, byref(alist), buf, bufsize,
                        options | FSOPT_REPORT_FULLSIZE)
    if ret < 0:
        err = _get_errno()
        raise OSError(err, os.strerror(err))

    return _decode_attrlist_result(buf, requested, options)
|
||||
|
||||
def statfs(path):
    """Return a struct_statfs describing the filesystem holding *path*.

    Raises OSError when the underlying statfs(2) call fails.
    """
    if not isinstance(path, bytes):
        path = path.encode('utf-8')
    info = struct_statfs()
    if _statfs(path, byref(info)) < 0:
        err = _get_errno()
        raise OSError(err, os.strerror(err), path)
    return info
|
||||
|
||||
def fstatfs(fd):
    """Return a struct_statfs for the filesystem of the open file *fd*.

    *fd* may be a raw descriptor or any object with a fileno() method.
    Raises OSError when the underlying fstatfs(2) call fails.
    """
    if hasattr(fd, 'fileno'):
        fd = fd.fileno()
    info = struct_statfs()
    if _fstatfs(fd, byref(info)) < 0:
        err = _get_errno()
        raise OSError(err, os.strerror(err))
    return info
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/osx.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/osx.pyc
generated
vendored
Normal file
Binary file not shown.
18
buildfiles/node_modules/dmg-builder/vendor/mac_alias/utils.py
generated
vendored
Normal file
18
buildfiles/node_modules/dmg-builder/vendor/mac_alias/utils.py
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
|
||||
ZERO = datetime.timedelta(0)  # zero offset shared by the UTC tzinfo
|
||||
class UTC(datetime.tzinfo):
    """Concrete tzinfo for Coordinated Universal Time: zero offset, no DST."""

    def utcoffset(self, dt):
        return datetime.timedelta(0)

    def dst(self, dt):
        return datetime.timedelta(0)

    def tzname(self, dt):
        return 'UTC'
|
||||
|
||||
utc = UTC()
# Epochs for the timestamp encodings handled by this package, all
# timezone-aware so arithmetic between them is well defined.
mac_epoch = datetime.datetime(1904,1,1,0,0,0,0,utc)   # classic Mac OS / HFS
unix_epoch = datetime.datetime(1970,1,1,0,0,0,0,utc)  # POSIX time_t
osx_epoch = datetime.datetime(2001,1,1,0,0,0,0,utc)   # CFAbsoluteTime
|
||||
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/utils.pyc
generated
vendored
Normal file
BIN
buildfiles/node_modules/dmg-builder/vendor/mac_alias/utils.pyc
generated
vendored
Normal file
Binary file not shown.
Reference in New Issue
Block a user