UnityIOCompressionをインポート

This commit is contained in:
水谷圭吾 2021-06-15 15:01:53 +09:00
parent 14b8559950
commit e497c24954
55 changed files with 4512 additions and 0 deletions

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 41ef1fd0a35b840818f4b911480e6acf
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,7 @@
namespace Unity.IO.Compression
{
    /// <summary>
    /// The three DEFLATE block encodings (RFC 1951, BTYPE field):
    /// stored/uncompressed, fixed-Huffman, and dynamic-Huffman.
    /// </summary>
    internal enum BlockType
    {
        Uncompressed = 0,
        Static = 1,
        Dynamic = 2,
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 34db5983ec6daa348b2361f9fb7dc7b4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,7 @@
namespace Unity.IO.Compression
{
    /// <summary>
    /// Direction selector for <c>DeflateStream</c>/<c>GZipStream</c>:
    /// a stream either decompresses what it reads or compresses what it writes.
    /// </summary>
    public enum CompressionMode
    {
        Decompress = 0,
        Compress = 1,
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6344c619bcacc4e488c3073c5e2c6408
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,70 @@
namespace Unity.IO.Compression
{
    using System;
    using System.Diagnostics;

    /// <summary>
    /// Emits "stored" (uncompressed) DEFLATE blocks: a byte-aligned header,
    /// a LEN/NLEN pair, then the raw payload bytes. Used when compressing
    /// would expand the data, and for the empty final block.
    /// </summary>
    internal class CopyEncoder {

        // Formatting overhead of a stored block:
        // 1 byte of header + 2 bytes LEN + 2 bytes NLEN.
        private const int PaddingSize = 5;

        // A stored deflate block can carry at most 64K of payload.
        private const int MaxUncompressedBlockSize = 65536;

        /// <summary>
        /// Writes one stored block into <paramref name="output"/>. A null
        /// <paramref name="input"/> produces an empty payload with full
        /// formatting — needed for the final block of a stream.
        /// </summary>
        public void GetBlock(DeflateInput input, OutputBuffer output, bool isFinal) {
            Debug.Assert(output != null);
            Debug.Assert(output.FreeBytes >= PaddingSize);

            // Decide how many payload bytes fit into this block.
            int payloadLength = 0;
            if (input != null) {
                // Reserve room for the block formatting and for any bits still
                // sitting unflushed in the output bit buffer.
                payloadLength = Math.Min(input.Count, output.FreeBytes - PaddingSize - output.BitsInBuffer);

                // Clamp to the stored-block maximum. Output buffers are currently
                // far smaller (4K), but guard against that changing.
                int maxPayload = MaxUncompressedBlockSize - PaddingSize;
                if (payloadLength > maxPayload) {
                    payloadLength = maxPayload;
                }
            }

            // Emit the stored-block header, with or without the BFINAL bit set.
            if (isFinal) {
                output.WriteBits(FastEncoderStatics.BFinalNoCompressionHeaderBitCount,
                                 FastEncoderStatics.BFinalNoCompressionHeader);
            }
            else {
                output.WriteBits(FastEncoderStatics.NoCompressionHeaderBitCount,
                                 FastEncoderStatics.NoCompressionHeader);
            }

            // Stored blocks are byte-aligned; flush partial bits now.
            output.FlushBits();

            // LEN followed by its one's complement (NLEN).
            WriteLenNLen((ushort)payloadLength, output);

            // Finally the raw payload, consuming it from the input window.
            if (input != null && payloadLength > 0) {
                output.WriteBytes(input.Buffer, input.StartIndex, payloadLength);
                input.ConsumeBytes(payloadLength);
            }
        }

        // Writes the 16-bit payload length and its bitwise complement,
        // as the stored-block format requires (RFC 1951, section 3.2.4).
        private void WriteLenNLen(ushort len, OutputBuffer output) {
            output.WriteUInt16(len);
            output.WriteUInt16((ushort)(~(ushort)len));
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8e17d68d5f93ac34baf82f1d28d44e48
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,88 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
namespace Unity.IO.Compression
{
    using System.Diagnostics;

    /// <summary>
    /// CRC-32 helper (gzip / RFC 1952 flavor: reflected polynomial,
    /// pre- and post-inverted) used to checksum the uncompressed payload.
    /// </summary>
    internal static class Crc32Helper
    {
        // Reversed representation of the CRC-32 generator polynomial.
        private const uint Polynomial = 0xedb88320u;

        // Byte-at-a-time lookup table, built once per process. Entry n is the
        // CRC remainder of the single byte n.
        static readonly uint[] crcTable = BuildTable();

        // Generates the standard CRC-32 table by running 8 rounds of the
        // bitwise algorithm for every possible byte value. Produces exactly
        // the constants tabulated in RFC 1952 section 8.
        private static uint[] BuildTable()
        {
            uint[] table = new uint[256];
            for (uint n = 0; n < 256u; n++)
            {
                uint c = n;
                for (int bit = 0; bit < 8; bit++)
                {
                    c = ((c & 1u) != 0u) ? (Polynomial ^ (c >> 1)) : (c >> 1);
                }
                table[n] = c;
            }
            return table;
        }

        /// <summary>
        /// Folds buffer[offset .. offset+length) into the running CRC.
        /// Pass 0 for the first chunk and the previous return value for
        /// subsequent chunks. See RFC 1952 for details.
        /// </summary>
        static public uint UpdateCrc32(uint crc32, byte[] buffer, int offset, int length)
        {
            Debug.Assert((buffer != null) && (offset >= 0) && (length >= 0)
                   && (offset <= buffer.Length - length), "check the caller");

            // Invert on entry and exit, per the gzip CRC definition.
            uint crc = crc32 ^ 0xffffffffU;
            for (int i = 0; i < length; i++)
            {
                crc = crcTable[(crc ^ buffer[offset + i]) & 0xFF] ^ (crc >> 8);
            }
            return crc ^ 0xffffffffU;
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: baa342fbc2f8ff9408912e3dc33456b0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,61 @@
namespace Unity.IO.Compression {
    using System.Diagnostics;

    /// <summary>
    /// A sliding window over a caller-supplied byte buffer. The unread data
    /// is the range [StartIndex, StartIndex + Count); consuming bytes moves
    /// the window forward, and the window can be snapshotted and rolled back.
    /// </summary>
    internal class DeflateInput {

        // The underlying buffer and the window into it.
        internal byte[] Buffer { get; set; }
        internal int Count { get; set; }
        internal int StartIndex { get; set; }

        // Advances the window past n consumed bytes.
        internal void ConsumeBytes(int n) {
            Debug.Assert(n <= Count, "Should use more bytes than what we have in the buffer");
            StartIndex += n;
            Count -= n;
            Debug.Assert(StartIndex + Count <= Buffer.Length, "Input buffer is in invalid state!");
        }

        // Captures the current window so it can be restored later
        // (used when a compression attempt must be rolled back).
        internal InputState DumpState() {
            InputState snapshot;
            snapshot.count = Count;
            snapshot.startIndex = StartIndex;
            return snapshot;
        }

        // Rolls the window back to a previously captured snapshot.
        internal void RestoreState(InputState state) {
            Count = state.count;
            StartIndex = state.startIndex;
        }

        // Plain value snapshot of the window position.
        internal struct InputState {
            internal int count;
            internal int startIndex;
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a70fb53a0a617584bae8e3dbb5432dce
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,628 @@
///----------- ----------- ----------- ----------- ----------- ----------- -----------
/// <copyright file="DeflateStream.cs" company="Microsoft">
/// Copyright (c) Microsoft Corporation. All rights reserved.
/// </copyright>
///
///----------- ----------- ----------- ----------- ----------- ----------- -----------
///
using System;
using System.IO;
using System.Diagnostics;
using System.Threading;
namespace Unity.IO.Compression {
/// <summary>
/// Stream wrapper that DEFLATE-compresses data written to it (Compress mode)
/// or decompresses data read from it (Decompress mode), passing the result
/// through to an inner stream. The stream is one-directional: readable only
/// in Decompress mode, writable only in Compress mode, never seekable.
/// Optional format reader/writer hooks (set by GZipStream) add a
/// header/footer around the raw deflate data.
/// </summary>
public class DeflateStream : Stream {

    internal const int DefaultBufferSize = 8192;

    // Signature used to run InternalWrite asynchronously via BeginInvoke.
    internal delegate void AsyncWriteDelegate(byte[] array, int offset, int count, bool isAsync);

    //private const String OrigStackTrace_ExceptionDataKey = "ORIGINAL_STACK_TRACE";

    private Stream _stream;           // inner stream; null once disposed
    private CompressionMode _mode;
    private bool _leaveOpen;          // when true, Dispose leaves _stream open
    private Inflater inflater;        // decompression engine (Decompress mode only)
    private IDeflater deflater;       // compression engine (Compress mode only)

    private byte[] buffer;            // staging buffer between the engines and _stream
    private int asyncOperations;      // count of outstanding Begin*/End* pairs (0 or 1)
#if !NETFX_CORE
    private readonly AsyncCallback m_CallBack;
    private readonly AsyncWriteDelegate m_AsyncWriterDelegate;
#endif

    private IFileFormatWriter formatWriter;  // optional header/footer writer (e.g. gzip)
    private bool wroteHeader;                // formatWriter header already emitted
    private bool wroteBytes;                 // at least one data byte passed through

    // Which deflater implementation backs this stream; only Managed is used here.
    private enum WorkerType : byte { Managed, Unknown };

    public DeflateStream(Stream stream, CompressionMode mode)
        : this(stream, mode, false) {
    }

    /// <summary>
    /// Wraps <paramref name="stream"/> for the given direction.
    /// Throws ArgumentNullException/ArgumentException on a null stream,
    /// an invalid mode, or a stream that cannot be read/written as required.
    /// </summary>
    public DeflateStream(Stream stream, CompressionMode mode, bool leaveOpen) {

        if(stream == null )
            throw new ArgumentNullException("stream");

        if (CompressionMode.Compress != mode && CompressionMode.Decompress != mode)
            throw new ArgumentException(SR.GetString(SR.ArgumentOutOfRange_Enum), "mode");

        _stream = stream;
        _mode = mode;
        _leaveOpen = leaveOpen;

        switch (_mode) {

            case CompressionMode.Decompress:

                if (!_stream.CanRead) {
                    throw new ArgumentException(SR.GetString(SR.NotReadableStream), "stream");
                }

                inflater = new Inflater();
#if !NETFX_CORE
                m_CallBack = new AsyncCallback(ReadCallback);
#endif
                break;

            case CompressionMode.Compress:

                if (!_stream.CanWrite) {
                    throw new ArgumentException(SR.GetString(SR.NotWriteableStream), "stream");
                }

                deflater = CreateDeflater();
#if !NETFX_CORE
                m_AsyncWriterDelegate = new AsyncWriteDelegate(this.InternalWrite);
                m_CallBack = new AsyncCallback(WriteCallback);
#endif
                break;

        }  // switch (_mode)

        buffer = new byte[DefaultBufferSize];
    }

    // Factory for the deflater backing a Compress-mode stream.
    private static IDeflater CreateDeflater() {
        switch (GetDeflaterType()) {

            case WorkerType.Managed:
                return new DeflaterManaged();

            default:
                // We do not expect this to ever be thrown.
                // But this is better practice than returning null.
#if NETFX_CORE
                throw new Exception("Program entered an unexpected state.");
#else
                throw new SystemException("Program entered an unexpected state.");
#endif
        }
    }

#if !SILVERLIGHT
    [System.Security.SecuritySafeCritical]
#endif
    private static WorkerType GetDeflaterType() {
        // Only the managed deflater is available in this port.
        return WorkerType.Managed;
    }

    // Installs an optional header/footer parser (e.g. gzip) on the inflater.
    internal void SetFileFormatReader(IFileFormatReader reader) {
        if (reader != null) {
            inflater.SetFileFormatReader(reader);
        }
    }

    // Installs an optional header/footer writer (e.g. gzip) used by DoMaintenance.
    internal void SetFileFormatWriter(IFileFormatWriter writer) {
        if (writer != null) {
            formatWriter = writer;
        }
    }

    // The wrapped inner stream (null after disposal).
    public Stream BaseStream {
        get {
            return _stream;
        }
    }

    public override bool CanRead {
        get {
            if( _stream == null) {
                return false;
            }

            return (_mode == CompressionMode.Decompress && _stream.CanRead);
        }
    }

    public override bool CanWrite {
        get {
            if( _stream == null) {
                return false;
            }

            return (_mode == CompressionMode.Compress && _stream.CanWrite);
        }
    }

    public override bool CanSeek {
        get {
            return false;
        }
    }

    // Length/Position are unsupported: compressed size is unknowable up front.
    public override long Length {
        get {
            throw new NotSupportedException(SR.GetString(SR.NotSupported));
        }
    }

    public override long Position {
        get {
            throw new NotSupportedException(SR.GetString(SR.NotSupported));
        }

        set {
            throw new NotSupportedException(SR.GetString(SR.NotSupported));
        }
    }

    // No-op apart from the disposed check: pending compressed bytes are only
    // pushed out by PurgeBuffers during Dispose.
    public override void Flush() {
        EnsureNotDisposed();
        return;
    }

    public override long Seek(long offset, SeekOrigin origin) {
        throw new NotSupportedException(SR.GetString(SR.NotSupported));
    }

    public override void SetLength(long value) {
        throw new NotSupportedException(SR.GetString(SR.NotSupported));
    }

    /// <summary>
    /// Decompresses up to <paramref name="count"/> bytes into
    /// <paramref name="array"/>, pulling more compressed input from the base
    /// stream as needed. Returns the number of bytes produced; fewer than
    /// requested (or 0) indicates end of the compressed stream or of the
    /// base stream.
    /// </summary>
    public override int Read(byte[] array, int offset, int count) {
        EnsureDecompressionMode();
        ValidateParameters(array, offset, count);
        EnsureNotDisposed();

        int bytesRead;
        int currentOffset = offset;
        int remainingCount = count;

        while(true) {
            // Drain whatever the inflater can currently produce.
            bytesRead = inflater.Inflate(array, currentOffset, remainingCount);
            currentOffset += bytesRead;
            remainingCount -= bytesRead;

            if( remainingCount == 0) {
                break;
            }

            if (inflater.Finished() ) {
                // if we finished decompressing, we can't have anything left in the outputwindow.
                Debug.Assert(inflater.AvailableOutput == 0, "We should have copied all stuff out!");
                break;
            }

            Debug.Assert(inflater.NeedsInput(), "We can only run into this case if we are short of input");

            // Refill with compressed data from the base stream.
            int bytes = _stream.Read(buffer, 0, buffer.Length);
            if( bytes == 0) {
                break;      //Do we want to throw an exception here?
            }

            inflater.SetInput(buffer, 0 , bytes);
        }

        return count - remainingCount;
    }

    // Standard (array, offset, count) argument validation shared by Read/Write.
    private void ValidateParameters(byte[] array, int offset, int count) {

        if (array==null)
            throw new ArgumentNullException("array");

        if (offset < 0)
            throw new ArgumentOutOfRangeException("offset");

        if (count < 0)
            throw new ArgumentOutOfRangeException("count");

        if (array.Length - offset < count)
            throw new ArgumentException(SR.GetString(SR.InvalidArgumentOffsetCount));
    }

    private void EnsureNotDisposed() {
        // _stream is nulled out by Dispose, so it doubles as the disposed flag.
        if (_stream == null)
            throw new ObjectDisposedException(null, SR.GetString(SR.ObjectDisposed_StreamClosed));
    }

    private void EnsureDecompressionMode() {
        if( _mode != CompressionMode.Decompress)
            throw new InvalidOperationException(SR.GetString(SR.CannotReadFromDeflateStream));
    }

    private void EnsureCompressionMode() {
        if( _mode != CompressionMode.Compress)
            throw new InvalidOperationException(SR.GetString(SR.CannotWriteToDeflateStream));
    }

#if !NETFX_CORE
    /// <summary>
    /// APM-style async read. Completes synchronously when the inflater
    /// already has output (or is finished); otherwise chains onto an async
    /// read of the base stream. Only one async operation may be in flight.
    /// </summary>
    public override IAsyncResult BeginRead(byte[] array, int offset, int count, AsyncCallback asyncCallback, object asyncState) {
        EnsureDecompressionMode();

        // We use this checking order for compat to earlier versions:
        if (asyncOperations != 0)
            throw new InvalidOperationException(SR.GetString(SR.InvalidBeginCall));

        ValidateParameters(array, offset, count);
        EnsureNotDisposed();

        Interlocked.Increment(ref asyncOperations);

        try {
            DeflateStreamAsyncResult userResult = new DeflateStreamAsyncResult(
                                                    this, asyncState, asyncCallback, array, offset, count);
            userResult.isWrite = false;

            // Try to read decompressed data in output buffer
            int bytesRead = inflater.Inflate(array, offset, count);

            if( bytesRead != 0) {
                // If decompression output buffer is not empty, return immediately.
                // 'true' means we complete synchronously.
                userResult.InvokeCallback(true, (object) bytesRead);
                return userResult;
            }

            if (inflater.Finished() ) {
                // end of compression stream
                userResult.InvokeCallback(true, (object) 0);
                return userResult;
            }

            // If there is no data on the output buffer and we are not at
            // the end of the stream, we need to get more data from the base stream
            _stream.BeginRead(buffer, 0, buffer.Length, m_CallBack, userResult);
            userResult.m_CompletedSynchronously &= userResult.IsCompleted;

            return userResult;

        } catch {
            Interlocked.Decrement( ref asyncOperations);
            throw;
        }
    }

    // callback function for asynchronous reading on base stream
    private void ReadCallback(IAsyncResult baseStreamResult) {
        DeflateStreamAsyncResult outerResult = (DeflateStreamAsyncResult) baseStreamResult.AsyncState;
        outerResult.m_CompletedSynchronously &= baseStreamResult.CompletedSynchronously;
        int bytesRead = 0;

        try {
            EnsureNotDisposed();

            bytesRead = _stream.EndRead(baseStreamResult);

            if (bytesRead <= 0 ) {
                // This indicates the base stream has received EOF
                outerResult.InvokeCallback((object) 0);
                return;
            }

            // Feed the data from base stream into decompression engine
            inflater.SetInput(buffer, 0 , bytesRead);
            bytesRead = inflater.Inflate(outerResult.buffer, outerResult.offset, outerResult.count);

            if (bytesRead == 0 && !inflater.Finished()) {

                // We could have read in head information and didn't get any data.
                // Read from the base stream again.
                // Need to solve recursion.
                _stream.BeginRead(buffer, 0, buffer.Length, m_CallBack, outerResult);

            } else {
                outerResult.InvokeCallback((object) bytesRead);
            }
        } catch (Exception exc) {
            // Defer throwing this until EndRead where we will likely have user code on the stack.
            outerResult.InvokeCallback(exc);
            return;
        }
    }

    /// <summary>
    /// Completes a BeginRead, rethrowing any deferred exception.
    /// Returns the number of decompressed bytes produced.
    /// </summary>
    public override int EndRead(IAsyncResult asyncResult) {
        EnsureDecompressionMode();
        CheckEndXxxxLegalStateAndParams(asyncResult);

        // We checked that this will work in CheckEndXxxxLegalStateAndParams:
        DeflateStreamAsyncResult deflateStrmAsyncResult = (DeflateStreamAsyncResult) asyncResult;

        AwaitAsyncResultCompletion(deflateStrmAsyncResult);

        Exception previousException = deflateStrmAsyncResult.Result as Exception;
        if (previousException != null) {
            // Rethrowing will delete the stack trace. Let's help future debuggers:
            //previousException.Data.Add(OrigStackTrace_ExceptionDataKey, previousException.StackTrace);
            throw previousException;
        }

        return (int) deflateStrmAsyncResult.Result;
    }
#endif

    /// <summary>
    /// Compresses <paramref name="count"/> bytes from
    /// <paramref name="array"/> and writes the compressed output
    /// (plus any pending format header) to the base stream.
    /// </summary>
    public override void Write(byte[] array, int offset, int count) {
        EnsureCompressionMode();
        ValidateParameters(array, offset, count);
        EnsureNotDisposed();
        InternalWrite(array, offset, count, false);
    }

    // isAsync always seems to be false. why do we have it?
    internal void InternalWrite(byte[] array, int offset, int count, bool isAsync) {
        DoMaintenance(array, offset, count);

        // Write compressed the bytes we already passed to the deflater:
        WriteDeflaterOutput(isAsync);

        // Pass new bytes through deflater and write them too:
        deflater.SetInput(array, offset, count);
        WriteDeflaterOutput(isAsync);
    }

    // Drains the deflater into the base stream until it asks for more input.
    private void WriteDeflaterOutput(bool isAsync) {
        while (!deflater.NeedsInput()) {
            int compressedBytes = deflater.GetDeflateOutput(buffer);
            if (compressedBytes > 0)
                DoWrite(buffer, 0, compressedBytes, isAsync);
        }
    }

    // Writes to the base stream; when isAsync, uses Begin/EndWrite back-to-back.
    private void DoWrite(byte[] array, int offset, int count, bool isAsync) {
        Debug.Assert(array != null);
        Debug.Assert(count != 0);

#if !NETFX_CORE
        if (isAsync) {
            IAsyncResult result = _stream.BeginWrite(array, offset, count, null, null);
            _stream.EndWrite(result);
        }
        else
#endif
        {
            _stream.Write(array, offset, count);
        }
    }

    // Perform deflate-mode maintenance required due to custom header and footer writers
    // (e.g. set by GZipStream):
    private void DoMaintenance(byte[] array, int offset, int count) {

        // If no bytes written, do nothing:
        if (count <= 0)
            return;

        // Note that stream contains more than zero data bytes:
        wroteBytes = true;

        // If no header/footer formatter present, nothing else to do:
        if (formatWriter == null)
            return;

        // If formatter has not yet written a header, do it now:
        if (!wroteHeader) {
            byte[] b = formatWriter.GetHeader();
            _stream.Write(b, 0, b.Length);
            wroteHeader = true;
        }

        // Inform formatter of the data bytes written (e.g. for CRC/length tracking):
        formatWriter.UpdateWithBytesRead(array, offset, count);
    }

    // This is called by Dispose:
    private void PurgeBuffers(bool disposing) {

        if (!disposing)
            return;

        if (_stream == null)
            return;

        Flush();

        if (_mode != CompressionMode.Compress)
            return;

        // Some deflaters (e.g. ZLib) write more than zero bytes for zero byte inputs.
        // This round-trips and we should be ok with this, but our legacy managed deflater
        // always wrote zero output for zero input and upstack code (e.g. GZipStream)
        // took dependencies on it. Thus, make sure to only "flush" when we actually had
        // some input:
        if (wroteBytes) {

            // Compress any bytes left:
            WriteDeflaterOutput(false);

            // Pull out any bytes left inside deflater:
            bool finished;
            do {
                int compressedBytes;
                finished = deflater.Finish(buffer, out compressedBytes);

                if (compressedBytes > 0)
                    DoWrite(buffer, 0, compressedBytes, false);

            } while (!finished);
        }

        // Write format footer:
        if (formatWriter != null && wroteHeader) {
            byte[] b = formatWriter.GetFooter();
            _stream.Write(b, 0, b.Length);
        }
    }

    /// <summary>
    /// Flushes remaining compressed data and the format footer, then closes
    /// the inner stream (unless leaveOpen) and the deflater. Safe against
    /// exceptions at every step via nested finally blocks.
    /// </summary>
    protected override void Dispose(bool disposing) {
        try {
            PurgeBuffers(disposing);
        } finally {

            // Close the underlying stream even if PurgeBuffers threw.
            // Stream.Close() may throw here (may or may not be due to the same error).
            // In this case, we still need to clean up internal resources, hence the inner finally blocks.
            try {

                if(disposing && !_leaveOpen && _stream != null)
                    _stream.Dispose();

            } finally {
                _stream = null;

                try {
                    if (deflater != null)
                        deflater.Dispose();
                } finally {
                    deflater = null;
                    base.Dispose(disposing);
                }
            }  // finally
        }  // finally
    }  // Dispose

#if !NETFX_CORE
    /// <summary>
    /// APM-style async write: compresses on a delegate thread via
    /// InternalWrite. Only one async operation may be in flight.
    /// </summary>
    public override IAsyncResult BeginWrite(byte[] array, int offset, int count, AsyncCallback asyncCallback, object asyncState) {
        EnsureCompressionMode();

        // We use this checking order for compat to earlier versions:
        if (asyncOperations != 0 )
            throw new InvalidOperationException(SR.GetString(SR.InvalidBeginCall));

        ValidateParameters(array, offset, count);
        EnsureNotDisposed();

        Interlocked.Increment(ref asyncOperations);

        try {
            DeflateStreamAsyncResult userResult = new DeflateStreamAsyncResult(
                                                        this, asyncState, asyncCallback, array, offset, count);
            userResult.isWrite = true;

            m_AsyncWriterDelegate.BeginInvoke(array, offset, count, true, m_CallBack, userResult);
            userResult.m_CompletedSynchronously &= userResult.IsCompleted;

            return userResult;

        } catch {
            Interlocked.Decrement(ref asyncOperations);
            throw;
        }
    }

    // Callback function for asynchronous writing on base stream
    private void WriteCallback(IAsyncResult asyncResult) {
        DeflateStreamAsyncResult outerResult = (DeflateStreamAsyncResult) asyncResult.AsyncState;
        outerResult.m_CompletedSynchronously &= asyncResult.CompletedSynchronously;

        try {

            m_AsyncWriterDelegate.EndInvoke(asyncResult);

        } catch (Exception exc) {
            // Defer throwing this until EndWrite where there is user code on the stack:
            outerResult.InvokeCallback(exc);
            return;
        }

        outerResult.InvokeCallback(null);
    }

    /// <summary>
    /// Completes a BeginWrite, rethrowing any deferred exception.
    /// </summary>
    public override void EndWrite(IAsyncResult asyncResult) {
        EnsureCompressionMode();
        CheckEndXxxxLegalStateAndParams(asyncResult);

        // We checked that this will work in CheckEndXxxxLegalStateAndParams:
        DeflateStreamAsyncResult deflateStrmAsyncResult = (DeflateStreamAsyncResult) asyncResult;

        AwaitAsyncResultCompletion(deflateStrmAsyncResult);

        Exception previousException = deflateStrmAsyncResult.Result as Exception;
        if (previousException != null) {
            // Rethrowing will delete the stack trace. Let's help future debuggers:
            //previousException.Data.Add(OrigStackTrace_ExceptionDataKey, previousException.StackTrace);
            throw previousException;
        }
    }

    // Shared validation for EndRead/EndWrite arguments and stream state.
    private void CheckEndXxxxLegalStateAndParams(IAsyncResult asyncResult) {

        if (asyncOperations != 1)
            throw new InvalidOperationException(SR.GetString(SR.InvalidEndCall));

        if (asyncResult == null)
            throw new ArgumentNullException("asyncResult");

        EnsureNotDisposed();

        DeflateStreamAsyncResult myResult = asyncResult as DeflateStreamAsyncResult;

        // This should really be an ArgumentException, but we keep this for compat to previous versions:
        if (myResult == null)
            throw new ArgumentNullException("asyncResult");
    }

    // Blocks until the async result completes, then releases the operation
    // slot and closes the result's wait handle.
    private void AwaitAsyncResultCompletion(DeflateStreamAsyncResult asyncResult) {
        try {

            if (!asyncResult.IsCompleted)
                asyncResult.AsyncWaitHandle.WaitOne();

        } finally {
            Interlocked.Decrement(ref asyncOperations);
            asyncResult.Close();  // this will just close the wait handle
        }
    }
#endif

}  // public class DeflateStream
} // namespace Unity.IO.Compression

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 06f3733a1fd50b647bc1be13d8a40d5b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,132 @@
#if !NETFX_CORE
namespace Unity.IO.Compression {
using System;
using System.Threading;
/// <summary>
/// IAsyncResult implementation backing DeflateStream's BeginRead/BeginWrite.
/// Tracks completion state with Interlocked counters and lazily allocates
/// the wait-handle event only if a caller actually asks for it.
/// </summary>
internal class DeflateStreamAsyncResult : IAsyncResult {
    public byte[] buffer;   // caller's buffer for the pending read/write
    public int offset;
    public int count;

    // disable csharp compiler warning #0414: field assigned unused value
#pragma warning disable 0414
    public bool isWrite;    // true when this result tracks a write operation
#pragma warning restore 0414

    private object m_AsyncObject;               // Caller's async object.
    private object m_AsyncState;                // Caller's state object.
    private AsyncCallback m_AsyncCallback;      // Caller's callback method.
    private object m_Result;                    // Final IO result to be returned by the End*() method.
    internal bool m_CompletedSynchronously;     // true if the operation completed synchronously.
    private int m_InvokedCallback;              // 0 if the callback has not been invoked yet.
    private int m_Completed;                    // 0 if not completed, >0 otherwise.
    private object m_Event;                     // lazy allocated event to be returned in the IAsyncResult for the client to wait on

    public DeflateStreamAsyncResult(object asyncObject, object asyncState,
                                    AsyncCallback asyncCallback,
                                    byte[] buffer, int offset, int count) {

        this.buffer = buffer;
        this.offset = offset;
        this.count = count;
        // Assume synchronous until an actual async step clears this flag.
        m_CompletedSynchronously = true;
        m_AsyncObject = asyncObject;
        m_AsyncState = asyncState;
        m_AsyncCallback = asyncCallback;
    }

    // Interface method to return the caller's state object.
    public object AsyncState {
        get {
            return m_AsyncState;
        }
    }

    // Interface property to return a WaitHandle that can be waited on for I/O completion.
    // This property implements lazy event creation.
    // the event object is only created when this property is accessed,
    // since we're internally only using callbacks, as long as the user is using
    // callbacks as well we will not create an event at all.
    public WaitHandle AsyncWaitHandle {
        get {
            // save a copy of the completion status
            int savedCompleted = m_Completed;
            if (m_Event == null) {
                // lazy allocation of the event:
                // if this property is never accessed this object is never created
                // (CompareExchange keeps only one event if two threads race here)
                Interlocked.CompareExchange(ref m_Event, new ManualResetEvent(savedCompleted != 0), null);
            }

            ManualResetEvent castedEvent = (ManualResetEvent)m_Event;
            if (savedCompleted == 0 && m_Completed != 0) {
                // if, while the event was created in the reset state,
                // the IO operation completed, set the event here.
                castedEvent.Set();
            }
            return castedEvent;
        }
    }

    // Interface property, returning synchronous completion status.
    public bool CompletedSynchronously {
        get {
            return m_CompletedSynchronously;
        }
    }

    // Interface property, returning completion status.
    public bool IsCompleted {
        get {
            return m_Completed != 0;
        }
    }

    // Internal property for setting the IO result.
    internal object Result {
        get {
            return m_Result;
        }
    }

    // Releases the lazily created wait handle, if any.
    internal void Close() {
        if (m_Event != null) {
            ((ManualResetEvent)m_Event).Close();
        }
    }

    internal void InvokeCallback(bool completedSynchronously, object result) {
        Complete(completedSynchronously, result);
    }

    internal void InvokeCallback(object result) {
        Complete(result);
    }

    // Internal method for setting completion.
    // As a side effect, we'll signal the WaitHandle event and clean up.
    private void Complete(bool completedSynchronously, object result) {
        m_CompletedSynchronously = completedSynchronously;
        Complete(result);
    }

    private void Complete(object result) {
        m_Result = result;

        // Set IsCompleted and the event only after the usercallback method.
        Interlocked.Increment(ref m_Completed);

        if (m_Event != null) {
            ((ManualResetEvent)m_Event).Set();
        }

        // Guarantee the user callback runs at most once even under races.
        if (Interlocked.Increment(ref m_InvokedCallback) == 1) {
            if (m_AsyncCallback != null) {
                m_AsyncCallback(this);
            }
        }
    }
}
}
#endif

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 769692916772bbf46b1a2a0a4a1ff5e6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,294 @@
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// zlib.h -- interface of the 'zlib' general purpose compression library
// version 1.2.1, November 17th, 2003
//
// Copyright (C) 1995-2003 Jean-loup Gailly and Mark Adler
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
//
//
// ==--==
// Compression engine
using System;
using System.Diagnostics;
namespace Unity.IO.Compression {
internal class DeflaterManaged : IDeflater {
private const int MinBlockSize = 256;
private const int MaxHeaderFooterGoo = 120;
private const int CleanCopySize = DeflateStream.DefaultBufferSize - MaxHeaderFooterGoo;
private const double BadCompressionThreshold = 1.0;
private FastEncoder deflateEncoder;
private CopyEncoder copyEncoder;
private DeflateInput input;
private OutputBuffer output;
private DeflaterState processingState;
private DeflateInput inputFromHistory;
// Wires up both encoders (the compressing FastEncoder and the stored-block
// CopyEncoder) over shared input/output state; compression-vs-copy decisions
// start in the NotStarted state.
internal DeflaterManaged() {
    deflateEncoder = new FastEncoder();
    copyEncoder = new CopyEncoder();
    input = new DeflateInput();
    output = new OutputBuffer();
    processingState = DeflaterState.NotStarted;
}
private bool NeedsInput() {
    // Convenience method to call NeedsInput privately without a cast.
    return ((IDeflater) this).NeedsInput();
}

// Input is exhausted only when both the pending input buffer and the
// encoder's history window are empty.
bool IDeflater.NeedsInput() {
    return input.Count == 0 && deflateEncoder.BytesInHistory == 0;
}
// Sets the input to compress. The only buffer copy occurs when the input is copied
// to the FastEncoderWindow
// Sets the input to compress. The only buffer copy occurs later, when the
// input is copied to the FastEncoderWindow.
void IDeflater.SetInput(byte[] inputBuffer, int startIndex, int count) {
    Debug.Assert(input.Count == 0, "We have something left in previous input!");

    input.Buffer = inputBuffer;
    input.Count = count;
    input.StartIndex = startIndex;

    // Writers pushing tiny buffers (below MinBlockSize) would drown the
    // output in per-block headers/footers, so shift into small-data mode.
    if (count > 0 && count < MinBlockSize) {
        if (processingState == DeflaterState.NotStarted ||
            processingState == DeflaterState.CheckingForIncompressible) {
            // clean state: a fresh block header must be written first
            processingState = DeflaterState.StartingSmallData;
        }
        else if (processingState == DeflaterState.CompressThenCheck) {
            // the correct block header is already in place
            processingState = DeflaterState.HandlingSmallData;
        }
        // any other state keeps its current handling, as before
    }
}
// Compresses as much pending input as fits into outputBuffer and returns the
// number of bytes written. Drives the DeflaterState machine: states decide
// whether to emit compressed or stored (uncompressed) blocks based on the
// compression ratio observed so far.
int IDeflater.GetDeflateOutput(byte[] outputBuffer) {
    Debug.Assert(outputBuffer != null, "Can't pass in a null output buffer!");
    Debug.Assert(!NeedsInput(), "GetDeflateOutput should only be called after providing input");

    output.UpdateBuffer(outputBuffer);

    switch(processingState) {
        case DeflaterState.NotStarted: {
            // first call. Try to compress but if we get bad compression ratio, switch to uncompressed blocks.
            Debug.Assert(deflateEncoder.BytesInHistory == 0, "have leftover bytes in window");

            // save these in case we need to switch to uncompressed format
            DeflateInput.InputState initialInputState = input.DumpState();
            OutputBuffer.BufferState initialOutputState = output.DumpState();

            deflateEncoder.GetBlockHeader(output);
            deflateEncoder.GetCompressedData(input, output);

            if (!UseCompressed(deflateEncoder.LastCompressionRatio)) {
                // we're expanding; restore state and switch to uncompressed
                input.RestoreState(initialInputState);
                output.RestoreState(initialOutputState);
                copyEncoder.GetBlock(input, output, false);
                FlushInputWindows();
                processingState = DeflaterState.CheckingForIncompressible;
            }
            else {
                processingState = DeflaterState.CompressThenCheck;
            }

            break;
        }
        case DeflaterState.CompressThenCheck: {
            // continue assuming data is compressible. If we reach data that indicates otherwise
            // finish off remaining data in history and decide whether to compress on a
            // block-by-block basis
            deflateEncoder.GetCompressedData(input, output);

            if (!UseCompressed(deflateEncoder.LastCompressionRatio)) {
                processingState = DeflaterState.SlowDownForIncompressible1;
                // remember the bytes still sitting in the history window so the
                // next state can flush them out as stored blocks
                inputFromHistory = deflateEncoder.UnprocessedInput;
            }
            break;
        }
        case DeflaterState.SlowDownForIncompressible1: {
            // finish off previous compressed block
            deflateEncoder.GetBlockFooter(output);

            processingState = DeflaterState.SlowDownForIncompressible2;
            goto case DeflaterState.SlowDownForIncompressible2; // yeah I know, but there's no fallthrough
        }
        case DeflaterState.SlowDownForIncompressible2: {
            // clear out data from history, but add them as uncompressed blocks
            if (inputFromHistory.Count > 0) {
                copyEncoder.GetBlock(inputFromHistory, output, false);
            }

            // may take several calls before the history drains completely
            if (inputFromHistory.Count == 0) {
                // now we're clean
                deflateEncoder.FlushInput();
                processingState = DeflaterState.CheckingForIncompressible;
            }
            break;
        }
        case DeflaterState.CheckingForIncompressible: {
            // decide whether to compress on a block-by-block basis
            Debug.Assert(deflateEncoder.BytesInHistory == 0, "have leftover bytes in window");

            // save these in case we need to store as uncompressed
            DeflateInput.InputState initialInputState = input.DumpState();
            OutputBuffer.BufferState initialOutputState = output.DumpState();

            // enforce max so we can ensure state between calls
            deflateEncoder.GetBlock(input, output, CleanCopySize);

            if (!UseCompressed(deflateEncoder.LastCompressionRatio)) {
                // we're expanding; restore state and switch to uncompressed
                input.RestoreState(initialInputState);
                output.RestoreState(initialOutputState);
                copyEncoder.GetBlock(input, output, false);
                FlushInputWindows();
            }

            break;
        }
        case DeflaterState.StartingSmallData: {
            // add compressed header and data, but not footer. Subsequent calls will keep
            // adding compressed data (no header and no footer). We're doing this to
            // avoid overhead of header and footer size relative to compressed payload.
            deflateEncoder.GetBlockHeader(output);

            processingState = DeflaterState.HandlingSmallData;
            goto case DeflaterState.HandlingSmallData; // yeah I know, but there's no fallthrough
        }
        case DeflaterState.HandlingSmallData: {
            // continue adding compressed data
            deflateEncoder.GetCompressedData(input, output);
            break;
        }
    }

    return output.BytesWritten;
}
// Terminates the deflate stream: closes any open compressed block and writes
// the final (BFINAL) empty stored block. Returns true; bytesRead reports how
// many bytes were written into outputBuffer.
bool IDeflater.Finish(byte[] outputBuffer, out int bytesRead) {
    Debug.Assert(outputBuffer != null, "Can't pass in a null output buffer!");
    Debug.Assert(processingState == DeflaterState.NotStarted ||
                 processingState == DeflaterState.CheckingForIncompressible ||
                 processingState == DeflaterState.HandlingSmallData ||
                 processingState == DeflaterState.CompressThenCheck ||
                 processingState == DeflaterState.SlowDownForIncompressible1,
                 "got unexpected processing state = " + processingState);
    Debug.Assert(NeedsInput());

    // nothing was ever written, so no end-of-block info is required
    if (processingState == DeflaterState.NotStarted) {
        bytesRead = 0;
        return true;
    }

    output.UpdateBuffer(outputBuffer);

    // these states left a compressed block open; emit its footer first
    bool blockStillOpen =
        processingState == DeflaterState.CompressThenCheck ||
        processingState == DeflaterState.HandlingSmallData ||
        processingState == DeflaterState.SlowDownForIncompressible1;
    if (blockStillOpen) {
        deflateEncoder.GetBlockFooter(output);
    }

    // write final block
    WriteFinal();
    bytesRead = output.BytesWritten;
    return true;
}
// This deflater holds no unmanaged resources, so both disposal entry points
// are intentionally no-ops.
void IDisposable.Dispose() { }
protected void Dispose(bool disposing) { }
// Is compression ratio under threshold?
private bool UseCompressed(double ratio) {
    // Keep emitting compressed blocks only while the measured ratio
    // (bytes out / bytes in) stays at or below the acceptable threshold.
    bool acceptable = ratio <= BadCompressionThreshold;
    return acceptable;
}
// Discards any bytes still buffered in the encoder's history window.
private void FlushInputWindows() {
    deflateEncoder.FlushInput();
}
// Emits the terminating block of the stream: an empty stored block with the
// final-block flag set (copyEncoder treats a null input as "format only").
private void WriteFinal() {
    copyEncoder.GetBlock(null, output, true);
}
// These states allow us to assume that data is compressible and keep compression ratios at least
// as good as historical values, but switch to different handling if that approach may increase the
// data. If we detect we're getting a bad compression ratio, we switch to CheckingForIncompressible
// state and decide to compress on a block by block basis.
//
// If we're getting small data buffers, we want to avoid overhead of excessive header and footer
// info, so we add one header and keep adding blocks as compressed. This means that if the user uses
// small buffers, they won't get the "don't increase size" improvements.
//
// An earlier iteration of this fix handled that data separately by buffering this data until it
// reached a reasonable size, but given that Flush is not implemented on DeflateStream, this meant
// data could be flushed only on Dispose. In the future, it would be reasonable to revisit this, in
// case this isn't breaking.
//
// Legal state transitions:
// NotStarted -> CheckingForIncompressible, CompressThenCheck, StartingSmallData
// CompressThenCheck -> SlowDownForIncompressible1
// SlowDownForIncompressible1 -> SlowDownForIncompressible2
// SlowDownForIncompressible2 -> CheckingForIncompressible
// StartingSmallData -> HandlingSmallData
private enum DeflaterState {
    // no bytes to write yet
    NotStarted,

    // transient states
    SlowDownForIncompressible1,
    SlowDownForIncompressible2,
    StartingSmallData,

    // stable state: may transition to CheckingForIncompressible (via transient states) if it
    // appears we're expanding data
    CompressThenCheck,

    // sink states
    CheckingForIncompressible,
    HandlingSmallData
}
} // internal class DeflaterManaged
} // namespace Unity.IO.Compression

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5cb3d0941cfb3c746aef8c291ebe3eb2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,179 @@
namespace Unity.IO.Compression {
    using System;
    using System.Diagnostics;
    using System.Globalization;

    // Deflate encoder that uses a single pair of hard-coded Huffman trees
    // (see FastEncoderStatics) instead of computing dynamic trees per block,
    // trading some compression ratio for speed.
    internal class FastEncoder {
        private FastEncoderWindow inputWindow; // input history window
        private Match currentMatch;            // current match in history window
        private double lastCompressionRatio;   // bytes out / bytes in of the last compression pass

        public FastEncoder() {
            inputWindow = new FastEncoderWindow();
            currentMatch = new Match();
        }

        // Bytes copied into the history window but not yet compressed.
        internal int BytesInHistory {
            get {
                return inputWindow.BytesAvailable;
            }
        }

        // View over the uncompressed tail of the history window.
        internal DeflateInput UnprocessedInput {
            get {
                return inputWindow.UnprocessedInput;
            }
        }

        // Discards any bytes still held in the history window.
        internal void FlushInput() {
            inputWindow.FlushWindow();
        }

        // Ratio observed on the most recent compression pass; only updated
        // when at least one output byte was produced.
        internal Double LastCompressionRatio {
            get { return lastCompressionRatio; }
        }

        // Copy the compressed bytes to output buffer as a block. maxBytesToCopy limits the number of
        // bytes we can copy from input. Set to any value < 1 if no limit
        internal void GetBlock(DeflateInput input, OutputBuffer output, int maxBytesToCopy) {
            Debug.Assert(InputAvailable(input), "call SetInput before trying to compress!");

            WriteDeflatePreamble(output);
            GetCompressedOutput(input, output, maxBytesToCopy);
            WriteEndOfBlock(output);
        }

        // Compress data but don't format as block (doesn't have header and footer)
        internal void GetCompressedData(DeflateInput input, OutputBuffer output) {
            GetCompressedOutput(input, output, -1);
        }

        internal void GetBlockHeader(OutputBuffer output) {
            WriteDeflatePreamble(output);
        }

        internal void GetBlockFooter(OutputBuffer output) {
            WriteEndOfBlock(output);
        }

        // maxBytesToCopy limits the number of bytes we can copy from input. Set to any value < 1 if no limit
        private void GetCompressedOutput(DeflateInput input, OutputBuffer output, int maxBytesToCopy) {
            // snapshot for compression ratio stats
            int bytesWrittenPre = output.BytesWritten;
            int bytesConsumedFromInput = 0;
            int inputBytesPre = BytesInHistory + input.Count;

            do {
                // read more input data into the window if there is space available
                int bytesToCopy = (input.Count < inputWindow.FreeWindowSpace) ?
                                           input.Count : inputWindow.FreeWindowSpace;
                if (maxBytesToCopy >= 1) {
                    bytesToCopy = Math.Min(bytesToCopy, maxBytesToCopy - bytesConsumedFromInput);
                }
                if (bytesToCopy > 0) {
                    // copy data into history window
                    inputWindow.CopyBytes(input.Buffer, input.StartIndex, bytesToCopy);
                    input.ConsumeBytes(bytesToCopy);
                    bytesConsumedFromInput += bytesToCopy;
                }

                GetCompressedOutput(output);

            } while (SafeToWriteTo(output) && InputAvailable(input) && (maxBytesToCopy < 1 || bytesConsumedFromInput < maxBytesToCopy));

            // determine compression ratio, save
            int bytesWrittenPost = output.BytesWritten;
            int bytesWritten = bytesWrittenPost - bytesWrittenPre;
            int inputBytesPost = BytesInHistory + input.Count;
            int totalBytesConsumed = inputBytesPre - inputBytesPost;
            if (bytesWritten != 0) {
                lastCompressionRatio = (double)bytesWritten / (double)totalBytesConsumed;
            }
        }

        // compress the bytes in input history window
        private void GetCompressedOutput(OutputBuffer output) {
            while (inputWindow.BytesAvailable > 0 && SafeToWriteTo(output)) {

                // Find next match. A match can be a symbol,
                // a distance/length pair, a symbol followed by a distance/Length pair
                inputWindow.GetNextSymbolOrMatch(currentMatch);

                if (currentMatch.State == MatchState.HasSymbol) {
                    WriteChar(currentMatch.Symbol, output);
                }
                else if (currentMatch.State == MatchState.HasMatch) {
                    WriteMatch(currentMatch.Length, currentMatch.Position, output);
                }
                else {
                    // symbol followed by a match: emit both
                    WriteChar(currentMatch.Symbol, output);
                    WriteMatch(currentMatch.Length, currentMatch.Position, output);
                }
            }
        }

        private bool InputAvailable(DeflateInput input) {
            return input.Count > 0 || BytesInHistory > 0;
        }

        private bool SafeToWriteTo(OutputBuffer output) { // can we safely continue writing to output buffer
            return output.FreeBytes > FastEncoderStatics.MaxCodeLen;
        }

        private void WriteEndOfBlock(OutputBuffer output) {
            // The fast encoder outputs one long block, so it just needs to terminate this block
            const int EndOfBlockCode = 256;
            uint code_info = FastEncoderStatics.FastEncoderLiteralCodeInfo[EndOfBlockCode];
            // low 5 bits hold the code length; the rest are the code bits
            int code_len = (int)(code_info & 31);
            output.WriteBits(code_len, code_info >> 5);
        }

        // Emits a length/distance pair using the precomputed code tables.
        static internal void WriteMatch(int matchLen, int matchPos, OutputBuffer output) {
            Debug.Assert(matchLen >= FastEncoderWindow.MinMatch && matchLen <= FastEncoderWindow.MaxMatch, "Illegal currentMatch length!");

            // Get the code information for a match code
            uint codeInfo = FastEncoderStatics.FastEncoderLiteralCodeInfo[(FastEncoderStatics.NumChars + 1 - FastEncoderWindow.MinMatch) + matchLen];
            int codeLen = (int)codeInfo & 31;
            Debug.Assert(codeLen != 0, "Invalid Match Length!");
            if (codeLen <= 16) {
                output.WriteBits(codeLen, codeInfo >> 5);
            }
            else {
                // code plus extra bits exceed 16 bits; emit in two writes
                output.WriteBits(16, (codeInfo >> 5) & 65535);
                output.WriteBits(codeLen - 16, codeInfo >> (5 + 16));
            }

            // Get the code information for a distance code
            codeInfo = FastEncoderStatics.FastEncoderDistanceCodeInfo[FastEncoderStatics.GetSlot(matchPos)];
            output.WriteBits((int)(codeInfo & 15), codeInfo >> 8);
            int extraBits = (int)(codeInfo >> 4) & 15;
            if (extraBits != 0) {
                output.WriteBits(extraBits, (uint)matchPos & FastEncoderStatics.BitMask[extraBits]);
            }
        }

        // Emits a single literal byte using the precomputed literal code table.
        static internal void WriteChar(byte b, OutputBuffer output) {
            uint code = FastEncoderStatics.FastEncoderLiteralCodeInfo[b];
            output.WriteBits((int)code & 31, code >> 5);
        }

        // Output the block type and tree structure for our hard-coded trees.
        // Contains following data:
        //  "final" block flag 1 bit
        //  BLOCKTYPE_DYNAMIC 2 bits
        //  FastEncoderLiteralTreeLength
        //  FastEncoderDistanceTreeLength
        //
        static internal void WriteDeflatePreamble(OutputBuffer output) {
            //Debug.Assert( bitCount == 0, "bitCount must be zero before writing tree bit!");

            output.WriteBytes(FastEncoderStatics.FastEncoderTreeStructureData, 0, FastEncoderStatics.FastEncoderTreeStructureData.Length);
            output.WriteBits(FastEncoderStatics.FastEncoderPostTreeBitCount, FastEncoderStatics.FastEncoderPostTreeBitBuf);
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fc22362256cea804682de8d27bc3b062
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,231 @@
namespace Unity.IO.Compression {
    using System.Diagnostics;
    using System.Globalization;

    // Precomputed code tables shared by FastEncoder. The byte/uint tables below
    // encode the single hard-coded Huffman tree pair used for every block.
    internal static class FastEncoderStatics {
        // static information for encoding, DO NOT MODIFY

        // Serialized dynamic-block tree description written as the block preamble
        // (non-final variant; first byte differs from the BFinal variant below).
        internal static readonly byte[] FastEncoderTreeStructureData = {
            0xec,0xbd,0x07,0x60,0x1c,0x49,0x96,0x25,0x26,0x2f,0x6d,0xca,
            0x7b,0x7f,0x4a,0xf5,0x4a,0xd7,0xe0,0x74,0xa1,0x08,0x80,0x60,
            0x13,0x24,0xd8,0x90,0x40,0x10,0xec,0xc1,0x88,0xcd,0xe6,0x92,
            0xec,0x1d,0x69,0x47,0x23,0x29,0xab,0x2a,0x81,0xca,0x65,0x56,
            0x65,0x5d,0x66,0x16,0x40,0xcc,0xed,0x9d,0xbc,0xf7,0xde,0x7b,
            0xef,0xbd,0xf7,0xde,0x7b,0xef,0xbd,0xf7,0xba,0x3b,0x9d,0x4e,
            0x27,0xf7,0xdf,0xff,0x3f,0x5c,0x66,0x64,0x01,0x6c,0xf6,0xce,
            0x4a,0xda,0xc9,0x9e,0x21,0x80,0xaa,0xc8,0x1f,0x3f,0x7e,0x7c,
            0x1f,0x3f,
        };

        // Same preamble with the BFINAL bit set (note the 0xed first byte).
        internal static readonly byte[] BFinalFastEncoderTreeStructureData = {
            0xed,0xbd,0x07,0x60,0x1c,0x49,0x96,0x25,0x26,0x2f,0x6d,0xca,
            0x7b,0x7f,0x4a,0xf5,0x4a,0xd7,0xe0,0x74,0xa1,0x08,0x80,0x60,
            0x13,0x24,0xd8,0x90,0x40,0x10,0xec,0xc1,0x88,0xcd,0xe6,0x92,
            0xec,0x1d,0x69,0x47,0x23,0x29,0xab,0x2a,0x81,0xca,0x65,0x56,
            0x65,0x5d,0x66,0x16,0x40,0xcc,0xed,0x9d,0xbc,0xf7,0xde,0x7b,
            0xef,0xbd,0xf7,0xde,0x7b,0xef,0xbd,0xf7,0xba,0x3b,0x9d,0x4e,
            0x27,0xf7,0xdf,0xff,0x3f,0x5c,0x66,0x64,0x01,0x6c,0xf6,0xce,
            0x4a,0xda,0xc9,0x9e,0x21,0x80,0xaa,0xc8,0x1f,0x3f,0x7e,0x7c,
            0x1f,0x3f,
        };

        // Output a currentMatch with length matchLen (>= MIN_MATCH) and displacement matchPos
        //
        // Optimisation: unlike the other encoders, here we have an array of codes for each currentMatch
        // length (not just each currentMatch length slot), complete with all the extra bits filled in, in
        // a single array element.
        //
        // There are many advantages to doing this:
        //
        // 1. A single array lookup on g_FastEncoderLiteralCodeInfo, instead of separate array lookups
        //    on g_LengthLookup (to get the length slot), g_FastEncoderLiteralTreeLength,
        //    g_FastEncoderLiteralTreeCode, g_ExtraLengthBits, and g_BitMask
        //
        // 2. The array is an array of ULONGs, so no access penalty, unlike for accessing those USHORT
        //    code arrays in the other encoders (although they could be made into ULONGs with some
        //    modifications to the source).
        //
        // Note, if we could guarantee that codeLen <= 16 always, then we could skip an if statement here.
        //
        // A completely different optimisation is used for the distance codes since, obviously, a table for
        // all 8192 distances combining their extra bits is not feasible. The distance codeinfo table is
        // made up of code[], len[] and # extraBits for this code.
        //
        // The advantages are similar to the above; a ULONG array instead of a USHORT and BYTE array, better
        // cache locality, fewer memory operations.
        //

        // Encoding information for literal and Length.
        // The least 5 significant bits are the length
        // and the rest is the code bits.
        internal static readonly uint[] FastEncoderLiteralCodeInfo = {
            0x0000d7ee,0x0004d7ee,0x0002d7ee,0x0006d7ee,0x0001d7ee,0x0005d7ee,0x0003d7ee,
            0x0007d7ee,0x000037ee,0x0000c7ec,0x00000126,0x000437ee,0x000237ee,0x000637ee,
            0x000137ee,0x000537ee,0x000337ee,0x000737ee,0x0000b7ee,0x0004b7ee,0x0002b7ee,
            0x0006b7ee,0x0001b7ee,0x0005b7ee,0x0003b7ee,0x0007b7ee,0x000077ee,0x000477ee,
            0x000277ee,0x000677ee,0x000017ed,0x000177ee,0x00000526,0x000577ee,0x000023ea,
            0x0001c7ec,0x000377ee,0x000777ee,0x000217ed,0x000063ea,0x00000b68,0x00000ee9,
            0x00005beb,0x000013ea,0x00000467,0x00001b68,0x00000c67,0x00002ee9,0x00000768,
            0x00001768,0x00000f68,0x00001ee9,0x00001f68,0x00003ee9,0x000053ea,0x000001e9,
            0x000000e8,0x000021e9,0x000011e9,0x000010e8,0x000031e9,0x000033ea,0x000008e8,
            0x0000f7ee,0x0004f7ee,0x000018e8,0x000009e9,0x000004e8,0x000029e9,0x000014e8,
            0x000019e9,0x000073ea,0x0000dbeb,0x00000ce8,0x00003beb,0x0002f7ee,0x000039e9,
            0x00000bea,0x000005e9,0x00004bea,0x000025e9,0x000027ec,0x000015e9,0x000035e9,
            0x00000de9,0x00002bea,0x000127ec,0x0000bbeb,0x0006f7ee,0x0001f7ee,0x0000a7ec,
            0x00007beb,0x0005f7ee,0x0000fbeb,0x0003f7ee,0x0007f7ee,0x00000fee,0x00000326,
            0x00000267,0x00000a67,0x00000667,0x00000726,0x00001ce8,0x000002e8,0x00000e67,
            0x000000a6,0x0001a7ec,0x00002de9,0x000004a6,0x00000167,0x00000967,0x000002a6,
            0x00000567,0x000117ed,0x000006a6,0x000001a6,0x000005a6,0x00000d67,0x000012e8,
            0x00000ae8,0x00001de9,0x00001ae8,0x000007eb,0x000317ed,0x000067ec,0x000097ed,
            0x000297ed,0x00040fee,0x00020fee,0x00060fee,0x00010fee,0x00050fee,0x00030fee,
            0x00070fee,0x00008fee,0x00048fee,0x00028fee,0x00068fee,0x00018fee,0x00058fee,
            0x00038fee,0x00078fee,0x00004fee,0x00044fee,0x00024fee,0x00064fee,0x00014fee,
            0x00054fee,0x00034fee,0x00074fee,0x0000cfee,0x0004cfee,0x0002cfee,0x0006cfee,
            0x0001cfee,0x0005cfee,0x0003cfee,0x0007cfee,0x00002fee,0x00042fee,0x00022fee,
            0x00062fee,0x00012fee,0x00052fee,0x00032fee,0x00072fee,0x0000afee,0x0004afee,
            0x0002afee,0x0006afee,0x0001afee,0x0005afee,0x0003afee,0x0007afee,0x00006fee,
            0x00046fee,0x00026fee,0x00066fee,0x00016fee,0x00056fee,0x00036fee,0x00076fee,
            0x0000efee,0x0004efee,0x0002efee,0x0006efee,0x0001efee,0x0005efee,0x0003efee,
            0x0007efee,0x00001fee,0x00041fee,0x00021fee,0x00061fee,0x00011fee,0x00051fee,
            0x00031fee,0x00071fee,0x00009fee,0x00049fee,0x00029fee,0x00069fee,0x00019fee,
            0x00059fee,0x00039fee,0x00079fee,0x00005fee,0x00045fee,0x00025fee,0x00065fee,
            0x00015fee,0x00055fee,0x00035fee,0x00075fee,0x0000dfee,0x0004dfee,0x0002dfee,
            0x0006dfee,0x0001dfee,0x0005dfee,0x0003dfee,0x0007dfee,0x00003fee,0x00043fee,
            0x00023fee,0x00063fee,0x00013fee,0x00053fee,0x00033fee,0x00073fee,0x0000bfee,
            0x0004bfee,0x0002bfee,0x0006bfee,0x0001bfee,0x0005bfee,0x0003bfee,0x0007bfee,
            0x00007fee,0x00047fee,0x00027fee,0x00067fee,0x00017fee,0x000197ed,0x000397ed,
            0x000057ed,0x00057fee,0x000257ed,0x00037fee,0x000157ed,0x00077fee,0x000357ed,
            0x0000ffee,0x0004ffee,0x0002ffee,0x0006ffee,0x0001ffee,0x00000084,0x00000003,
            0x00000184,0x00000044,0x00000144,0x000000c5,0x000002c5,0x000001c5,0x000003c6,
            0x000007c6,0x00000026,0x00000426,0x000003a7,0x00000ba7,0x000007a7,0x00000fa7,
            0x00000227,0x00000627,0x00000a27,0x00000e27,0x00000068,0x00000868,0x00001068,
            0x00001868,0x00000369,0x00001369,0x00002369,0x00003369,0x000006ea,0x000026ea,
            0x000046ea,0x000066ea,0x000016eb,0x000036eb,0x000056eb,0x000076eb,0x000096eb,
            0x0000b6eb,0x0000d6eb,0x0000f6eb,0x00003dec,0x00007dec,0x0000bdec,0x0000fdec,
            0x00013dec,0x00017dec,0x0001bdec,0x0001fdec,0x00006bed,0x0000ebed,0x00016bed,
            0x0001ebed,0x00026bed,0x0002ebed,0x00036bed,0x0003ebed,0x000003ec,0x000043ec,
            0x000083ec,0x0000c3ec,0x000103ec,0x000143ec,0x000183ec,0x0001c3ec,0x00001bee,
            0x00009bee,0x00011bee,0x00019bee,0x00021bee,0x00029bee,0x00031bee,0x00039bee,
            0x00041bee,0x00049bee,0x00051bee,0x00059bee,0x00061bee,0x00069bee,0x00071bee,
            0x00079bee,0x000167f0,0x000367f0,0x000567f0,0x000767f0,0x000967f0,0x000b67f0,
            0x000d67f0,0x000f67f0,0x001167f0,0x001367f0,0x001567f0,0x001767f0,0x001967f0,
            0x001b67f0,0x001d67f0,0x001f67f0,0x000087ef,0x000187ef,0x000287ef,0x000387ef,
            0x000487ef,0x000587ef,0x000687ef,0x000787ef,0x000887ef,0x000987ef,0x000a87ef,
            0x000b87ef,0x000c87ef,0x000d87ef,0x000e87ef,0x000f87ef,0x0000e7f0,0x0002e7f0,
            0x0004e7f0,0x0006e7f0,0x0008e7f0,0x000ae7f0,0x000ce7f0,0x000ee7f0,0x0010e7f0,
            0x0012e7f0,0x0014e7f0,0x0016e7f0,0x0018e7f0,0x001ae7f0,0x001ce7f0,0x001ee7f0,
            0x0005fff3,0x000dfff3,0x0015fff3,0x001dfff3,0x0025fff3,0x002dfff3,0x0035fff3,
            0x003dfff3,0x0045fff3,0x004dfff3,0x0055fff3,0x005dfff3,0x0065fff3,0x006dfff3,
            0x0075fff3,0x007dfff3,0x0085fff3,0x008dfff3,0x0095fff3,0x009dfff3,0x00a5fff3,
            0x00adfff3,0x00b5fff3,0x00bdfff3,0x00c5fff3,0x00cdfff3,0x00d5fff3,0x00ddfff3,
            0x00e5fff3,0x00edfff3,0x00f5fff3,0x00fdfff3,0x0003fff3,0x000bfff3,0x0013fff3,
            0x001bfff3,0x0023fff3,0x002bfff3,0x0033fff3,0x003bfff3,0x0043fff3,0x004bfff3,
            0x0053fff3,0x005bfff3,0x0063fff3,0x006bfff3,0x0073fff3,0x007bfff3,0x0083fff3,
            0x008bfff3,0x0093fff3,0x009bfff3,0x00a3fff3,0x00abfff3,0x00b3fff3,0x00bbfff3,
            0x00c3fff3,0x00cbfff3,0x00d3fff3,0x00dbfff3,0x00e3fff3,0x00ebfff3,0x00f3fff3,
            0x00fbfff3,0x0007fff3,0x000ffff3,0x0017fff3,0x001ffff3,0x0027fff3,0x002ffff3,
            0x0037fff3,0x003ffff3,0x0047fff3,0x004ffff3,0x0057fff3,0x005ffff3,0x0067fff3,
            0x006ffff3,0x0077fff3,0x007ffff3,0x0087fff3,0x008ffff3,0x0097fff3,0x009ffff3,
            0x00a7fff3,0x00affff3,0x00b7fff3,0x00bffff3,0x00c7fff3,0x00cffff3,0x00d7fff3,
            0x00dffff3,0x00e7fff3,0x00effff3,0x00f7fff3,0x00fffff3,0x0001e7f1,0x0003e7f1,
            0x0005e7f1,0x0007e7f1,0x0009e7f1,0x000be7f1,0x000de7f1,0x000fe7f1,0x0011e7f1,
            0x0013e7f1,0x0015e7f1,0x0017e7f1,0x0019e7f1,0x001be7f1,0x001de7f1,0x001fe7f1,
            0x0021e7f1,0x0023e7f1,0x0025e7f1,0x0027e7f1,0x0029e7f1,0x002be7f1,0x002de7f1,
            0x002fe7f1,0x0031e7f1,0x0033e7f1,0x0035e7f1,0x0037e7f1,0x0039e7f1,0x003be7f1,
            0x003de7f1,0x000047eb,
        };

        // Per-distance-slot encoding info: low 4 bits = code length,
        // bits 4-7 = # extra bits, bits 8+ = code bits (see WriteMatch).
        internal static readonly uint[] FastEncoderDistanceCodeInfo = {
            0x00000f06,0x0001ff0a,0x0003ff0b,0x0007ff0b,0x0000ff19,0x00003f18,0x0000bf28,
            0x00007f28,0x00001f37,0x00005f37,0x00000d45,0x00002f46,0x00000054,0x00001d55,
            0x00000864,0x00000365,0x00000474,0x00001375,0x00000c84,0x00000284,0x00000a94,
            0x00000694,0x00000ea4,0x000001a4,0x000009b4,0x00000bb5,0x000005c4,0x00001bc5,
            0x000007d5,0x000017d5,0x00000000,0x00000100,
        };

        // BitMask[n] has the low n bits set; used to isolate extra distance bits.
        internal static readonly uint[] BitMask = { 0, 1, 3, 7, 15, 31, 63, 127, 255, 511, 1023, 2047, 4095, 8191, 16383, 32767 };
        internal static readonly byte[] ExtraLengthBits = { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0 };
        internal static readonly byte[] ExtraDistanceBits = { 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 0, 0 };

        internal const int NumChars = 256;
        internal const int NumLengthBaseCodes = 29;
        internal const int NumDistBaseCodes = 30;

        internal const uint FastEncoderPostTreeBitBuf = 0x0022;
        internal const int FastEncoderPostTreeBitCount = 9;

        internal const uint NoCompressionHeader = 0x0;
        internal const int NoCompressionHeaderBitCount = 3;
        internal const uint BFinalNoCompressionHeader = 0x1;
        internal const int BFinalNoCompressionHeaderBitCount = 3;
        internal const int MaxCodeLen = 16;

        // distance -> distance-slot lookup, built once in the static constructor
        static private byte[] distLookup;

        static FastEncoderStatics() {
            distLookup = new byte[512];

            // Generate the global slot tables which allow us to convert a distance
            // (0..32K) to a distance slot (0..29)
            //
            // Distance table
            //                  Extra           Extra               Extra
            //             Code Bits Dist   Code Bits  Dist     Code Bits Distance
            //             ---- ---- ----   ---- ----  ------   ---- ---- --------
            //               0   0    1     10   4     33-48    20    9   1025-1536
            //               1   0    2     11   4     49-64    21    9   1537-2048
            //               2   0    3     12   5     65-96    22   10   2049-3072
            //               3   0    4     13   5     97-128   23   10   3073-4096
            //               4   1   5,6    14   6    129-192   24   11   4097-6144
            //               5   1   7,8    15   6    193-256   25   11   6145-8192
            //               6   2   9-12   16   7    257-384   26   12  8193-12288
            //               7   2  13-16   17   7    385-512   27   12 12289-16384
            //               8   3  17-24   18   8    513-768   28   13 16385-24576
            //               9   3  25-32   19   8   769-1024   29   13 24577-32768

            // Initialize the mapping length (0..255) -> length code (0..28)
            //int length = 0;
            //for (code = 0; code < FastEncoderStatics.NumLengthBaseCodes-1; code++) {
            //    for (int n = 0; n < (1 << FastEncoderStatics.ExtraLengthBits[code]); n++)
            //        lengthLookup[length++] = (byte) code;
            //}
            //lengthLookup[length-1] = (byte) code;

            // Initialize the mapping dist (0..32K) -> dist code (0..29)
            int dist = 0;
            int code;
            // slots 0..15 cover distances 1..256 directly
            for (code = 0; code < 16; code++) {
                for (int n = 0; n < (1 << FastEncoderStatics.ExtraDistanceBits[code]); n++)
                    distLookup[dist++] = (byte)code;
            }

            dist >>= 7; // from now on, all distances are divided by 128

            // slots 16..29 are indexed at (256 + dist>>7), matching GetSlot below
            for (; code < FastEncoderStatics.NumDistBaseCodes; code++) {
                for (int n = 0; n < (1 << (FastEncoderStatics.ExtraDistanceBits[code] - 7)); n++)
                    distLookup[256 + dist++] = (byte)code;
            }
        }

        // Return the position slot (0...29) of a match offset (0...32767)
        static internal int GetSlot(int pos) {
            return distLookup[((pos) < 256) ? (pos) : (256 + ((pos) >> 7))];
        }

        // Reverse 'length' of the bits in code
        public static uint BitReverse(uint code, int length) {
            uint new_code = 0;

            Debug.Assert(length > 0 && length <= 16, "Invalid len");
            do {
                new_code |= (code & 1);
                new_code <<= 1;
                code >>= 1;
            } while (--length > 0);

            return new_code >> 1;
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b0a0b1312c597c549862a2990eece005
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,391 @@
namespace Unity.IO.Compression {
using System;
using System.Diagnostics;
internal class FastEncoderWindow {
private byte[] window; // complete bytes window
private int bufPos; // the start index of uncompressed bytes
private int bufEnd; // the end index of uncompressed bytes
// Be very careful about increasing the window size; the code tables will have to
// be updated, since they assume that extra_distance_bits is never larger than a
// certain size.
const int FastEncoderHashShift = 4;
const int FastEncoderHashtableSize = 2048;
const int FastEncoderHashMask = FastEncoderHashtableSize-1;
const int FastEncoderWindowSize = 8192;
const int FastEncoderWindowMask = FastEncoderWindowSize - 1;
const int FastEncoderMatch3DistThreshold = 16384;
internal const int MaxMatch = 258;
internal const int MinMatch = 3;
// Following constants affect the search,
// they should be modifiable if we support different compression levels in future.
const int SearchDepth = 32;
const int GoodLength = 4;
const int NiceLength = 32;
const int LazyMatchThreshold = 6;
// Hashtable structure
private ushort[] prev; // next most recent occurance of chars with same hash value
private ushort[] lookup; // hash table to find most recent occurance of chars with same hash value
// Creates an empty window with freshly allocated buffers and hash chains.
public FastEncoderWindow() {
    ResetWindow();
}
// Number of bytes copied into the window but not yet consumed by compression.
public int BytesAvailable { // uncompressed bytes
    get {
        Debug.Assert(bufEnd - bufPos >= 0, "Ending pointer can't be in front of starting pointer!");
        return bufEnd - bufPos;
    }
}
// Exposes the not-yet-compressed tail of the window as a DeflateInput view
// (shares the window array; no copy is made).
public DeflateInput UnprocessedInput {
    get {
        DeflateInput pending = new DeflateInput();
        pending.Buffer = window;
        pending.StartIndex = bufPos;
        pending.Count = bufEnd - bufPos;
        return pending;
    }
}
// Discards all window contents and hash state by reallocating everything.
public void FlushWindow() {
    ResetWindow();
}
private void ResetWindow() {
    // window is sized at two window-spans plus MaxMatch + 4 extra bytes —
    // presumably slack so match scanning can read slightly past bufEnd
    // without bounds checks (TODO confirm against FindMatch).
    window = new byte[2 * FastEncoderWindowSize + MaxMatch + 4];
    prev = new ushort[FastEncoderWindowSize + MaxMatch];
    lookup = new ushort[FastEncoderHashtableSize];
    // data always lives in the upper half; MoveWindows slides it back down
    bufPos = FastEncoderWindowSize;
    bufEnd = bufPos;
}

// Bytes that can still be appended before the window must slide.
public int FreeWindowSpace { // Free space in the window
    get {
        return 2 * FastEncoderWindowSize - bufEnd;
    }
}
// copy bytes from input buffer into window
// Caller must ensure count <= FreeWindowSpace; bytes are appended at bufEnd.
public void CopyBytes(byte[] inputBuffer, int startIndex, int count) {
    Array.Copy(inputBuffer, startIndex, window, bufEnd, count);
    bufEnd += count;
}
// slide the history window to the left by FastEncoderWindowSize bytes
// All hash-table and prev-chain entries are absolute offsets into the window,
// so they must be rebased (entries that slide below zero are cleared).
public void MoveWindows() {
    int i;
    Debug.Assert(bufPos == 2*FastEncoderWindowSize, "only call this at the end of the window");

    // verify that the hash table is correct
    VerifyHashes(); // Debug only code

    Array.Copy(window, bufPos - FastEncoderWindowSize, window, 0, FastEncoderWindowSize);

    // move all the hash pointers back
    for (i = 0; i < FastEncoderHashtableSize; i++) {
        int val = ((int) lookup[i]) - FastEncoderWindowSize;

        if (val <= 0) { // too far away now? then set to zero
            lookup[i] = (ushort) 0;
        } else {
            lookup[i] = (ushort) val;
        }
    }

    // prev[]'s are absolute pointers, not relative pointers, so we have to move them back too
    // making prev[]'s into relative pointers poses problems of its own
    for (i = 0; i < FastEncoderWindowSize; i++) {
        long val = ((long) prev[i]) - FastEncoderWindowSize;

        if (val <= 0) {
            prev[i] = (ushort) 0;
        } else {
            prev[i] = (ushort) val;
        }
    }

#if DEBUG
    // For debugging, wipe the window clean, so that if there is a bug in our hashing,
    // the hash pointers will now point to locations which are not valid for the hash value
    // (and will be caught by our ASSERTs).
    Array.Clear(window, FastEncoderWindowSize, window.Length - FastEncoderWindowSize);
#endif

    VerifyHashes(); // debug: verify hash table is correct

    bufPos = FastEncoderWindowSize;
    bufEnd = bufPos;
}
// Rolling hash step: promote the previous hash value and fold in the next byte.
private uint HashValue(uint hash, byte b) {
    uint shifted = hash << FastEncoderHashShift;
    return shifted ^ b;
}
// insert string into hash table and return most recent location of same hash value
private uint InsertString(ref uint hash) {
    // Note we only use the lowest 11 bits of the hash vallue (hash table size is 11).
    // This enables fast calculation of hash value for the input string.
    // If we want to get the next hash code starting at next position,
    // we can just increment bufPos and call this function.

    // fold the third byte of the 3-byte string at bufPos into the rolling hash
    hash = HashValue( hash, window[bufPos+2] );

    // Need to assert the hash value
    uint search = lookup[hash & FastEncoderHashMask];
    // head of the chain becomes this position; old head is linked via prev[]
    lookup[hash & FastEncoderHashMask] = (ushort) bufPos;
    prev[bufPos & FastEncoderWindowMask] = (ushort) search;
    return search;
}
//
// insert strings into hashtable
// Arguments:
//     hash     : initial hash value
//     matchLen : 1 + number of strings we need to insert.
//
private void InsertStrings(ref uint hash, int matchLen) {
    Debug.Assert(matchLen > 0, "Invalid match Len!");

    if (bufEnd - bufPos <= matchLen) {
        // Too close to the end of the data for safe hashing: just advance
        // the cursor past the match without updating the hash chains.
        bufPos += (matchLen - 1);
    }
    else {
        // Insert matchLen - 1 positions, advancing the cursor as we go.
        for (int remaining = matchLen - 1; remaining > 0; remaining--) {
            InsertString(ref hash);
            bufPos++;
        }
    }
}
//
// Find out what we should generate next. It can be a symbol, a distance/length pair
// or a symbol followed by distance/length pair
//
// Implements deflate-style "lazy matching": after finding a match at X, it
// peeks at X+1 and prefers the longer of the two, emitting the byte at X as a
// plain symbol when X+1 wins. Fills in 'match' and advances bufPos.
internal bool GetNextSymbolOrMatch(Match match) {
    Debug.Assert(bufPos >= FastEncoderWindowSize && bufPos < (2*FastEncoderWindowSize), "Invalid Buffer Position!");

    // initialise the value of the hash, no problem if locations bufPos, bufPos+1
    // are invalid (not enough data), since we will never insert using that hash value
    uint hash = HashValue( 0 , window[bufPos]);
    hash = HashValue( hash , window[bufPos + 1]);

    int matchLen;
    int matchPos = 0;

    VerifyHashes(); // Debug only code
    if (bufEnd - bufPos <= 3) {
        // The hash value becomes corrupt when we get within 3 characters of the end of the
        // input window, since the hash value is based on 3 characters.  We just stop
        // inserting into the hash table at this point, and allow no matches.
        matchLen = 0;
    }
    else {
        // insert string into hash table and return most recent location of same hash value
        int search = (int)InsertString(ref hash);

        // did we find a recent location of this hash value?
        if (search != 0) {
            // yes, now find a match at what we'll call position X
            matchLen = FindMatch(search, out matchPos, SearchDepth, NiceLength);

            // truncate match if we're too close to the end of the input window
            if (bufPos + matchLen > bufEnd)
                matchLen = bufEnd - bufPos;
        }
        else {
            // no most recent location found
            matchLen = 0;
        }
    }

    if (matchLen < MinMatch) {
        // didn't find a match, so output unmatched char
        match.State = MatchState.HasSymbol;
        match.Symbol = window[bufPos];
        bufPos++;
    }
    else {
        // bufPos now points to X+1
        bufPos++;

        // is this match so good (long) that we should take it automatically without
        // checking X+1 ?
        if (matchLen <= LazyMatchThreshold) {
            int nextMatchLen;
            int nextMatchPos = 0;

            // search at position X+1
            int search = (int)InsertString(ref hash);

            // no, so check for a better match at X+1
            if (search != 0) {
                nextMatchLen = FindMatch(search, out nextMatchPos,
                                         matchLen < GoodLength ? SearchDepth : (SearchDepth >> 2),NiceLength);

                // truncate match if we're too close to the end of the window
                // note: nextMatchLen could now be < MinMatch
                if (bufPos + nextMatchLen > bufEnd) {
                    nextMatchLen = bufEnd - bufPos;
                }
            } else {
                nextMatchLen = 0;
            }

            // right now X and X+1 are both inserted into the search tree
            if (nextMatchLen > matchLen) {
                // since nextMatchLen > matchLen, it can't be < MinMatch here

                // match at X+1 is better, so output unmatched char at X
                match.State = MatchState.HasSymbolAndMatch;
                match.Symbol = window[bufPos-1];
                match.Position = nextMatchPos;
                match.Length = nextMatchLen;

                // insert remainder of second match into search tree
                // example: (*=inserted already)
                //
                // X      X+1               X+2      X+3     X+4
                // *      *
                //        nextmatchlen=3
                //        bufPos
                //
                // If nextMatchLen == 3, we want to perform 2
                // insertions (at X+2 and X+3).  However, first we must
                // inc bufPos.
                //
                bufPos++; // now points to X+2
                matchLen = nextMatchLen;
                InsertStrings(ref hash, matchLen);
            } else {
                // match at X is better, so take it
                match.State = MatchState.HasMatch;
                match.Position = matchPos;
                match.Length = matchLen;

                // Insert remainder of first match into search tree, minus the first
                // two locations, which were inserted by the FindMatch() calls.
                //
                // For example, if matchLen == 3, then we've inserted at X and X+1
                // already (and bufPos is now pointing at X+1), and now we need to insert
                // only at X+2.
                //
                matchLen--;
                bufPos++; // now bufPos points to X+2
                InsertStrings(ref hash, matchLen);
            }
        } else { // match_length >= good_match
            // in assertion: bufPos points to X+1, location X inserted already

            // first match is so good that we're not even going to check at X+1
            match.State = MatchState.HasMatch;
            match.Position = matchPos;
            match.Length = matchLen;

            // insert remainder of match at X into search tree
            InsertStrings(ref hash, matchLen);
        }
    }

    if (bufPos == 2*FastEncoderWindowSize) {
        MoveWindows();
    }
    return true;
}
//
// Find a match starting at specified position and return length of match
// Arguments:
// search : where to start searching
// matchPos : return match position here
// searchDepth : # links to traverse
// NiceLength : stop immediately if we find a match >= NiceLength
//
int FindMatch(int search, out int matchPos, int searchDepth, int niceLength) {
    Debug.Assert(bufPos >= 0 && bufPos < 2*FastEncoderWindowSize, "Invalid Buffer position!");
    Debug.Assert(search < bufPos, "Invalid starting search point!");
    Debug.Assert(RecalculateHash(search) == RecalculateHash(bufPos));

    int bestMatch = 0;       // best match length found so far
    int bestMatchPos = 0;    // absolute window position of the best match found

    // the earliest window position we are allowed to search back to
    int earliest = bufPos - FastEncoderWindowSize;
    Debug.Assert(earliest >= 0, "bufPos is less than FastEncoderWindowSize!");

    byte wantChar = window[bufPos];
    while (search > earliest) {
        // make sure all our hash links are valid
        Debug.Assert(RecalculateHash(search) == RecalculateHash(bufPos), "Corrupted hash link!");

        // Start by checking the character that would allow us to increase the match
        // length by one. This improves performance quite a bit.
        if (window[search + bestMatch] == wantChar) {
            int j;

            // Now make sure that all the other characters are correct
            for (j = 0; j < MaxMatch; j++) {
                if (window[bufPos+j] != window[search+j])
                    break;
            }

            if (j > bestMatch) {
                bestMatch = j;
                bestMatchPos = search; // absolute position
                // BUGFIX: honor the caller-supplied niceLength parameter; the
                // original compared against the NiceLength constant, silently
                // ignoring the argument. (All current callers pass NiceLength,
                // so behavior is unchanged for them.)
                if (j > niceLength) break;
                wantChar = window[bufPos+j];
            }
        }

        if (--searchDepth == 0) {
            break;
        }

        Debug.Assert(prev[search & FastEncoderWindowMask] < search, "we should always go backwards!");
        search = prev[search & FastEncoderWindowMask];
    }

    // doesn't necessarily mean we found a match; bestMatch could be > 0 and < MinMatch
    matchPos = bufPos - bestMatchPos - 1; // convert absolute to relative position

    // don't allow match length 3's which are too far away to be worthwhile
    if (bestMatch == 3 && matchPos >= FastEncoderMatch3DistThreshold) {
        return 0;
    }

    Debug.Assert(bestMatch < MinMatch || matchPos < FastEncoderWindowSize, "Only find match inside FastEncoderWindowSize");
    return bestMatch;  // may be > 0 and < MinMatch; callers must re-check
}
[Conditional("DEBUG")]
void VerifyHashes() {
    // Debug-only consistency check: walk every hash chain and confirm that
    // each linked position still hashes to its bucket and that each chain
    // strictly moves backwards through the window.
    for (int bucket = 0; bucket < FastEncoderHashtableSize; bucket++) {
        ushort pos = lookup[bucket];
        while (pos != 0 && bufPos - pos < FastEncoderWindowSize) {
            Debug.Assert(RecalculateHash(pos) == bucket, "Incorrect Hashcode!");
            ushort older = prev[pos & FastEncoderWindowMask];
            if (bufPos - older >= FastEncoderWindowSize) {
                break;  // remainder of the chain has slid out of the window
            }
            Debug.Assert(older < pos, "pointer is messed up!");
            pos = older;
        }
    }
}
// Note: can't use the Conditional attribute here.
// Recomputes the hash of the 3 bytes starting at 'position' from scratch
// (used by the debug assertions above to validate the incremental hash).
uint RecalculateHash(int position) {
    int h = window[position] << (2 * FastEncoderHashShift);
    h ^= window[position + 1] << FastEncoderHashShift;
    h ^= window[position + 2];
    return (uint)(h & FastEncoderHashMask);
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e479f6f2cc5ce854d9eb5e17e0f5e4b6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,16 @@
namespace Unity.IO.Compression
{
    /// <summary>
    /// Writes container-format framing (header/footer) around raw deflate
    /// data while compressing. Implemented by GZipFormatter for gzip.
    /// </summary>
    interface IFileFormatWriter {
        /// <summary>Header bytes to emit before any compressed data.</summary>
        byte[] GetHeader();
        /// <summary>Observes a chunk of the uncompressed input so footer
        /// fields (e.g. CRC32 and length) can be computed incrementally.</summary>
        void UpdateWithBytesRead(byte[] buffer, int offset, int bytesToCopy);
        /// <summary>Footer bytes to emit after all compressed data.</summary>
        byte[] GetFooter();
    }

    /// <summary>
    /// Reads and validates container-format framing while decompressing.
    /// Implemented by GZipDecoder for gzip.
    /// </summary>
    interface IFileFormatReader {
        /// <summary>Consumes the header; returns false if more input is needed.</summary>
        bool ReadHeader(InputBuffer input);
        /// <summary>Consumes the footer; returns false if more input is needed.</summary>
        bool ReadFooter(InputBuffer input);
        /// <summary>Observes a chunk of the decompressed output so footer
        /// fields (e.g. CRC32 and length) can be verified later.</summary>
        void UpdateWithBytesRead(byte[] buffer, int offset, int bytesToCopy);
        /// <summary>Checks footer values against the observed data; throws on mismatch.</summary>
        void Validate();
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a719bacb9b3275148ae399b0a64110fa
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,317 @@
namespace Unity.IO.Compression {
using System;
using System.Diagnostics;
// This class decodes GZip header and footer information.
// See RFC 1952 for details about the format.
internal class GZipDecoder : IFileFormatReader {
    private GzipHeaderState gzipHeaderSubstate;     // resumable position within the header state machine
    private GzipHeaderState gzipFooterSubstate;     // resumable position within the footer state machine
    private int gzip_header_flag;                   // FLG byte: which optional header fields are present
    private int gzip_header_xlen;                   // XLEN: length of the optional "extra" field
    private uint expectedCrc32;                     // CRC32 read from the footer
    private uint expectedOutputStreamSizeModulo;    // ISIZE read from the footer (length mod 2^32)
    private int loopCounter;                        // byte counter for multi-byte/variable-length fields
    private uint actualCrc32;                       // CRC32 computed over the decompressed output
    private long actualStreamSizeModulo;            // decompressed byte count mod 2^32

    public GZipDecoder() {
        Reset();
    }

    public void Reset() {
        gzipHeaderSubstate = GzipHeaderState.ReadingID1;
        gzipFooterSubstate = GzipHeaderState.ReadingCRC;
        expectedCrc32 = 0;
        expectedOutputStreamSizeModulo = 0;
    }

    // Consumes the gzip member header from 'input'. Returns false when more
    // input is needed (the substate is saved so the call can be resumed);
    // throws InvalidDataException on malformed data. Each case falls through
    // to the next via 'goto case' once its field has been fully read.
    public bool ReadHeader(InputBuffer input) {
        while (true) {
            int bits;
            switch (gzipHeaderSubstate) {
                case GzipHeaderState.ReadingID1:
                    bits = input.GetBits(8);
                    if (bits < 0) {
                        return false;
                    }

                    if (bits != GZipConstants.ID1) {
                        throw new InvalidDataException(SR.GetString(SR.CorruptedGZipHeader));
                    }
                    gzipHeaderSubstate = GzipHeaderState.ReadingID2;
                    goto case GzipHeaderState.ReadingID2;

                case GzipHeaderState.ReadingID2:
                    bits = input.GetBits(8);
                    if (bits < 0) {
                        return false;
                    }

                    if (bits != GZipConstants.ID2) {
                        throw new InvalidDataException(SR.GetString(SR.CorruptedGZipHeader));
                    }

                    gzipHeaderSubstate = GzipHeaderState.ReadingCM;
                    goto case GzipHeaderState.ReadingCM;

                case GzipHeaderState.ReadingCM:
                    bits = input.GetBits(8);
                    if (bits < 0) {
                        return false;
                    }

                    if (bits != GZipConstants.Deflate) { // compression mode must be 8 (deflate)
                        throw new InvalidDataException(SR.GetString(SR.UnknownCompressionMode));
                    }

                    gzipHeaderSubstate = GzipHeaderState.ReadingFLG;
                    goto case GzipHeaderState.ReadingFLG;

                case GzipHeaderState.ReadingFLG:
                    bits = input.GetBits(8);
                    if (bits < 0) {
                        return false;
                    }

                    gzip_header_flag = bits;
                    gzipHeaderSubstate = GzipHeaderState.ReadingMMTime;
                    loopCounter = 0; // 4 MMTIME bytes
                    goto case GzipHeaderState.ReadingMMTime;

                case GzipHeaderState.ReadingMMTime:
                    // MTIME is read and discarded
                    bits = 0;
                    while (loopCounter < 4) {
                        bits = input.GetBits(8);
                        if (bits < 0) {
                            return false;
                        }

                        loopCounter++;
                    }

                    gzipHeaderSubstate = GzipHeaderState.ReadingXFL;
                    loopCounter = 0;
                    goto case GzipHeaderState.ReadingXFL;

                case GzipHeaderState.ReadingXFL: // ignore XFL
                    bits = input.GetBits(8);
                    if (bits < 0) {
                        return false;
                    }

                    gzipHeaderSubstate = GzipHeaderState.ReadingOS;
                    goto case GzipHeaderState.ReadingOS;

                case GzipHeaderState.ReadingOS: // ignore OS
                    bits = input.GetBits(8);
                    if (bits < 0) {
                        return false;
                    }

                    gzipHeaderSubstate = GzipHeaderState.ReadingXLen1;
                    goto case GzipHeaderState.ReadingXLen1;

                case GzipHeaderState.ReadingXLen1:
                    if ((gzip_header_flag & (int)GZipOptionalHeaderFlags.ExtraFieldsFlag) == 0) {
                        goto case GzipHeaderState.ReadingFileName;
                    }

                    bits = input.GetBits(8);
                    if (bits < 0) {
                        return false;
                    }

                    gzip_header_xlen = bits;
                    gzipHeaderSubstate = GzipHeaderState.ReadingXLen2;
                    goto case GzipHeaderState.ReadingXLen2;

                case GzipHeaderState.ReadingXLen2:
                    bits = input.GetBits(8);
                    if (bits < 0) {
                        return false;
                    }

                    // XLEN is little-endian: this is the high byte
                    gzip_header_xlen |= (bits << 8);
                    gzipHeaderSubstate = GzipHeaderState.ReadingXLenData;
                    loopCounter = 0; // 0 bytes of XLEN data read so far
                    goto case GzipHeaderState.ReadingXLenData;

                case GzipHeaderState.ReadingXLenData:
                    // extra-field payload is read and discarded
                    bits = 0;
                    while (loopCounter < gzip_header_xlen) {
                        bits = input.GetBits(8);
                        if (bits < 0) {
                            return false;
                        }

                        loopCounter++;
                    }
                    gzipHeaderSubstate = GzipHeaderState.ReadingFileName;
                    loopCounter = 0;
                    goto case GzipHeaderState.ReadingFileName;

                case GzipHeaderState.ReadingFileName:
                    if ((gzip_header_flag & (int)GZipOptionalHeaderFlags.FileNameFlag) == 0) {
                        gzipHeaderSubstate = GzipHeaderState.ReadingComment;
                        goto case GzipHeaderState.ReadingComment;
                    }

                    do {
                        bits = input.GetBits(8);
                        if (bits < 0) {
                            return false;
                        }

                        if (bits == 0) { // see '\0' terminating the file name string
                            break;
                        }
                    } while (true);

                    gzipHeaderSubstate = GzipHeaderState.ReadingComment;
                    goto case GzipHeaderState.ReadingComment;

                case GzipHeaderState.ReadingComment:
                    if ((gzip_header_flag & (int)GZipOptionalHeaderFlags.CommentFlag) == 0) {
                        gzipHeaderSubstate = GzipHeaderState.ReadingCRC16Part1;
                        goto case GzipHeaderState.ReadingCRC16Part1;
                    }

                    do {
                        bits = input.GetBits(8);
                        if (bits < 0) {
                            return false;
                        }

                        if (bits == 0) { // see '\0' terminating the comment string
                            break;
                        }
                    } while (true);

                    gzipHeaderSubstate = GzipHeaderState.ReadingCRC16Part1;
                    goto case GzipHeaderState.ReadingCRC16Part1;

                case GzipHeaderState.ReadingCRC16Part1:
                    if ((gzip_header_flag & (int)GZipOptionalHeaderFlags.CRCFlag) == 0) {
                        gzipHeaderSubstate = GzipHeaderState.Done;
                        goto case GzipHeaderState.Done;
                    }

                    bits = input.GetBits(8); // ignore crc
                    if (bits < 0) {
                        return false;
                    }

                    gzipHeaderSubstate = GzipHeaderState.ReadingCRC16Part2;
                    goto case GzipHeaderState.ReadingCRC16Part2;

                case GzipHeaderState.ReadingCRC16Part2:
                    bits = input.GetBits(8); // ignore crc
                    if (bits < 0) {
                        return false;
                    }

                    gzipHeaderSubstate = GzipHeaderState.Done;
                    goto case GzipHeaderState.Done;

                case GzipHeaderState.Done:
                    return true;
                default:
                    Debug.Assert(false, "We should not reach unknown state!");
                    throw new InvalidDataException(SR.GetString(SR.UnknownState));
            }
        }
    }

    // Consumes the 8-byte gzip footer (CRC32 + ISIZE, both little-endian).
    // Returns false when more input is needed; resumable via gzipFooterSubstate.
    public bool ReadFooter(InputBuffer input) {
        input.SkipToByteBoundary();
        if (gzipFooterSubstate == GzipHeaderState.ReadingCRC) {
            while (loopCounter < 4) {
                int bits = input.GetBits(8);
                if (bits < 0) {
                    return false;
                }

                expectedCrc32 |= ((uint)bits << (8 * loopCounter));
                loopCounter++;
            }
            gzipFooterSubstate = GzipHeaderState.ReadingFileSize;
            loopCounter = 0;
        }

        if (gzipFooterSubstate == GzipHeaderState.ReadingFileSize) {
            if (loopCounter == 0)
                expectedOutputStreamSizeModulo = 0;

            while (loopCounter < 4) {
                int bits = input.GetBits(8);
                if (bits < 0) {
                    return false;
                }

                expectedOutputStreamSizeModulo |= ((uint) bits << (8 * loopCounter));
                loopCounter++;
            }
        }

        return true;
    }

    // Folds a chunk of decompressed output into the running CRC32 and the
    // running length (kept reduced modulo 2^32 to match the footer's ISIZE).
    public void UpdateWithBytesRead(byte[] buffer, int offset, int copied) {
        actualCrc32 = Crc32Helper.UpdateCrc32(actualCrc32, buffer, offset, copied);

        long n = actualStreamSizeModulo + (uint) copied;
        if (n >= GZipConstants.FileLengthModulo) {
            n %= GZipConstants.FileLengthModulo;
        }
        actualStreamSizeModulo = n;
    }

    // Compares the footer's CRC32 and ISIZE against the values computed over
    // the actual output; throws InvalidDataException on any mismatch.
    public void Validate() {
        if (expectedCrc32 != actualCrc32) {
            throw new InvalidDataException(SR.GetString(SR.InvalidCRC));
        }

        if (actualStreamSizeModulo != expectedOutputStreamSizeModulo) {
            throw new InvalidDataException(SR.GetString(SR.InvalidStreamSize));
        }
    }

    internal enum GzipHeaderState {
        // GZIP header
        ReadingID1,
        ReadingID2,
        ReadingCM,
        ReadingFLG,
        ReadingMMTime, // iterates 4 times
        ReadingXFL,
        ReadingOS,
        ReadingXLen1,
        ReadingXLen2,
        ReadingXLenData,
        ReadingFileName,
        ReadingComment,
        ReadingCRC16Part1,
        ReadingCRC16Part2,
        Done, // done reading GZIP header

        // GZIP footer
        ReadingCRC, // iterates 4 times
        ReadingFileSize // iterates 4 times
    }

    [Flags]
    internal enum GZipOptionalHeaderFlags {
        CRCFlag = 2,
        ExtraFieldsFlag = 4,
        FileNameFlag = 8,
        CommentFlag = 16
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ae32247f283a26546b161336074d153e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,172 @@
namespace Unity.IO.Compression {
using System;
using System.IO;
using System.Diagnostics;
// Thin wrapper around DeflateStream that adds gzip (RFC 1952) framing:
// a GZipFormatter writes the header/footer when compressing and a
// GZipDecoder parses and validates them when decompressing. All stream
// operations are delegated to the inner DeflateStream.
public class GZipStream : Stream {

    private DeflateStream deflateStream;

    public GZipStream(Stream stream, CompressionMode mode)
        : this(stream, mode, false) {
    }

    public GZipStream(Stream stream, CompressionMode mode, bool leaveOpen) {
        deflateStream = new DeflateStream(stream, mode, leaveOpen);
        SetDeflateStreamFileFormatter(mode);
    }

    // Attaches the gzip header/footer handler matching the compression mode.
    private void SetDeflateStreamFileFormatter(CompressionMode mode) {
        if (mode == CompressionMode.Compress) {
            deflateStream.SetFileFormatWriter(new GZipFormatter());
        } else {
            deflateStream.SetFileFormatReader(new GZipDecoder());
        }
    }

    // The Can* properties report false (rather than throwing) once disposed.
    public override bool CanRead {
        get { return deflateStream == null ? false : deflateStream.CanRead; }
    }

    public override bool CanWrite {
        get { return deflateStream == null ? false : deflateStream.CanWrite; }
    }

    public override bool CanSeek {
        get { return deflateStream == null ? false : deflateStream.CanSeek; }
    }

    // Length/Position/Seek/SetLength are unsupported on compression streams.
    public override long Length {
        get { throw new NotSupportedException(SR.GetString(SR.NotSupported)); }
    }

    public override long Position {
        get { throw new NotSupportedException(SR.GetString(SR.NotSupported)); }
        set { throw new NotSupportedException(SR.GetString(SR.NotSupported)); }
    }

    public override void Flush() {
        if (deflateStream == null) {
            throw new ObjectDisposedException(null, SR.GetString(SR.ObjectDisposed_StreamClosed));
        }
        deflateStream.Flush();
    }

    public override long Seek(long offset, SeekOrigin origin) {
        throw new NotSupportedException(SR.GetString(SR.NotSupported));
    }

    public override void SetLength(long value) {
        throw new NotSupportedException(SR.GetString(SR.NotSupported));
    }

#if !NETFX_CORE
    public override IAsyncResult BeginRead(byte[] array, int offset, int count, AsyncCallback asyncCallback, object asyncState) {
        if (deflateStream == null) {
            throw new InvalidOperationException(SR.GetString(SR.ObjectDisposed_StreamClosed));
        }
        return deflateStream.BeginRead(array, offset, count, asyncCallback, asyncState);
    }

    public override int EndRead(IAsyncResult asyncResult) {
        if (deflateStream == null) {
            throw new InvalidOperationException(SR.GetString(SR.ObjectDisposed_StreamClosed));
        }
        return deflateStream.EndRead(asyncResult);
    }

    public override IAsyncResult BeginWrite(byte[] array, int offset, int count, AsyncCallback asyncCallback, object asyncState) {
        if (deflateStream == null) {
            throw new InvalidOperationException(SR.GetString(SR.ObjectDisposed_StreamClosed));
        }
        return deflateStream.BeginWrite(array, offset, count, asyncCallback, asyncState);
    }

    public override void EndWrite(IAsyncResult asyncResult) {
        if (deflateStream == null) {
            throw new InvalidOperationException(SR.GetString(SR.ObjectDisposed_StreamClosed));
        }
        deflateStream.EndWrite(asyncResult);
    }
#endif

    public override int Read(byte[] array, int offset, int count) {
        if (deflateStream == null) {
            throw new ObjectDisposedException(null, SR.GetString(SR.ObjectDisposed_StreamClosed));
        }
        return deflateStream.Read(array, offset, count);
    }

    public override void Write(byte[] array, int offset, int count) {
        if (deflateStream == null) {
            throw new ObjectDisposedException(null, SR.GetString(SR.ObjectDisposed_StreamClosed));
        }
        deflateStream.Write(array, offset, count);
    }

    protected override void Dispose(bool disposing) {
        try {
            if (disposing && deflateStream != null) {
                deflateStream.Dispose();
            }
            // null marks this wrapper as disposed for all the guards above
            deflateStream = null;
        } finally {
            base.Dispose(disposing);
        }
    }

    public Stream BaseStream {
        get { return deflateStream == null ? null : deflateStream.BaseStream; }
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: cd288fd89ab9ad8429570e158d07477c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,87 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
namespace Unity.IO.Compression {
using System.Diagnostics;
internal static class GZipConstants {
    // Compression-level markers consulted when choosing the XFL header byte
    // (see GZipFormatter's constructor).
    internal const int CompressionLevel_3 = 3;
    internal const int CompressionLevel_10 = 10;

    // 2^32: the gzip footer's ISIZE field stores the input length modulo this.
    internal const long FileLengthModulo = 4294967296;

    // Fixed gzip member-header values (RFC 1952).
    internal const byte ID1 = 0x1F;     // first magic byte
    internal const byte ID2 = 0x8B;     // second magic byte
    internal const byte Deflate = 0x8;  // CM: compression method = deflate

    internal const int Xfl_HeaderPos = 8;  // byte offset of XFL within the header
    internal const byte Xfl_FastestAlgorithm = 4;
    internal const byte Xfl_MaxCompressionSlowestAlgorithm = 2;
}
// Emits gzip (RFC 1952) framing around deflate output: a fixed 10-byte
// header up front, and an 8-byte footer (CRC32 + ISIZE) computed
// incrementally from the uncompressed input.
internal class GZipFormatter : IFileFormatWriter {
    // 10-byte gzip member header: ID1, ID2, CM, FLG, MTIME(4), XFL, OS.
    private byte[] headerBytes = new byte[] {
        GZipConstants.ID1,      // ID1
        GZipConstants.ID2,      // ID2
        GZipConstants.Deflate,  // CM = deflate
        0,                      // FLG: no text, no crc, no extra, no name, no comment
        0, 0, 0, 0,             // MTIME: no modification time available
        // XFL: 2 = max compression/slowest, 4 = fastest algorithm
        GZipConstants.Xfl_FastestAlgorithm,
        0                       // OS: 0 = FAT filesystem (MS-DOS, OS/2, NT/Win32)
    };

    private uint crc;               // running CRC32 of the uncompressed input
    private long inputSizeModulo;   // input length modulo 2^32 (footer ISIZE)

    internal GZipFormatter() : this(GZipConstants.CompressionLevel_3) { }

    internal GZipFormatter(int compressionLevel) {
        // only the max-compression level changes the advertised XFL byte
        if (compressionLevel == GZipConstants.CompressionLevel_10) {
            headerBytes[GZipConstants.Xfl_HeaderPos] = GZipConstants.Xfl_MaxCompressionSlowestAlgorithm;
        }
    }

    public byte[] GetHeader() {
        return headerBytes;
    }

    public void UpdateWithBytesRead(byte[] buffer, int offset, int bytesToCopy) {
        crc = Crc32Helper.UpdateCrc32(crc, buffer, offset, bytesToCopy);
        // keep the running length reduced modulo 2^32
        inputSizeModulo = (inputSizeModulo + (uint)bytesToCopy) % GZipConstants.FileLengthModulo;
    }

    public byte[] GetFooter() {
        // gzip trailer: CRC32 then ISIZE, both little-endian
        byte[] footer = new byte[8];
        WriteUInt32(footer, crc, 0);
        WriteUInt32(footer, (uint)inputSizeModulo, 4);
        return footer;
    }

    // Serializes 'value' little-endian into b[startIndex..startIndex+3].
    internal void WriteUInt32(byte[] b, uint value, int startIndex) {
        for (int i = 0; i < 4; i++) {
            b[startIndex + i] = (byte)(value >> (8 * i));
        }
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 730a067bff4eeaf41804bfc2e9bd4dd1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,290 @@
namespace Unity.IO.Compression
{
using System;
using System.Diagnostics;
// Strictly speaking this class is not a HuffmanTree, this class is
// a lookup table combined with a HuffmanTree. The idea is to speed up
// the lookup for short symbols (they should appear more frequently ideally.)
// However we don't want to create a huge table since it might take longer to
// build the table than decoding (Deflate usually generates new tables frequently.)
//
// Jean-loup Gailly and Mark Adler gave a very good explanation about this.
// The full text (algorithm.txt) can be found inside
// ftp://ftp.uu.net/pub/archiving/zip/zlib/zlib.zip.
//
// Following paper explains decoding in details:
// Hirschberg and Lelewer, "Efficient decoding of prefix codes,"
// Comm. ACM, 33,4, April 1990, pp. 449-459.
//
internal class HuffmanTree {
    internal const int MaxLiteralTreeElements = 288;
    internal const int MaxDistTreeElements = 32;
    internal const int EndOfBlockCode = 256;
    internal const int NumberOfCodeLengthTreeElements = 19;

    int tableBits;            // number of bits resolved by the fast lookup table
    short[] table;            // fast lookup table for codes <= tableBits bits
    short[] left;             // left children of the overflow binary tree
    short[] right;            // right children of the overflow binary tree
    byte[] codeLengthArray;   // code length (in bits) per symbol
#if DEBUG
    uint[] codeArrayDebug;    // debug copy of the computed (bit-reversed) codes
#endif

    int tableMask;            // (1 << tableBits) - 1

    // huffman tree for static block
    static HuffmanTree staticLiteralLengthTree;
    static HuffmanTree staticDistanceTree;

    static HuffmanTree() {
        // construct the static literal tree and distance tree
        staticLiteralLengthTree = new HuffmanTree(GetStaticLiteralTreeLength());
        staticDistanceTree = new HuffmanTree(GetStaticDistanceTreeLength());
    }

    static public HuffmanTree StaticLiteralLengthTree {
        get {
            return staticLiteralLengthTree;
        }
    }

    static public HuffmanTree StaticDistanceTree {
        get {
            return staticDistanceTree;
        }
    }

    // Builds the decode structures from the given per-symbol code lengths.
    public HuffmanTree(byte[] codeLengths) {
        Debug.Assert( codeLengths.Length == MaxLiteralTreeElements
                   || codeLengths.Length == MaxDistTreeElements
                   || codeLengths.Length == NumberOfCodeLengthTreeElements,
                    "we only expect three kinds of Length here");
        codeLengthArray = codeLengths;

        if (codeLengthArray.Length == MaxLiteralTreeElements) { // bits for Literal/Length tree table
            tableBits = 9;
        }
        else { // bits for distance tree table and code length tree table
            tableBits = 7;
        }
        tableMask = (1 << tableBits) -1;

        CreateTable();
    }

    // Generate the array that contains huffman code lengths for the static huffman tree.
    // The data is in RFC 1951.
    static byte[] GetStaticLiteralTreeLength() {
        byte[] literalTreeLength = new byte[MaxLiteralTreeElements];
        for (int i = 0; i <= 143; i++)
            literalTreeLength[i] = 8;

        for (int i = 144; i <= 255; i++)
            literalTreeLength[i] = 9;

        for (int i = 256; i <= 279; i++)
            literalTreeLength[i] = 7;

        for (int i = 280; i <= 287; i++)
            literalTreeLength[i] = 8;

        return literalTreeLength;
    }

    // Static distance codes are all 5 bits long (RFC 1951).
    static byte[] GetStaticDistanceTreeLength() {
        byte[] staticDistanceTreeLength = new byte[MaxDistTreeElements];
        for (int i = 0; i < MaxDistTreeElements; i++) {
            staticDistanceTreeLength[i] = 5;
        }
        return staticDistanceTreeLength;
    }

    // Calculate the huffman code for each character based on the code length for each character.
    // This algorithm is described in standard RFC 1951.
    // Returned codes are bit-reversed, matching the bit order used for lookup.
    uint[] CalculateHuffmanCode() {
        // count how many codes there are of each length (lengths are <= 16 here)
        uint[] bitLengthCount = new uint[17];
        foreach( int codeLength in codeLengthArray) {
            bitLengthCount[codeLength]++;
        }
        bitLengthCount[0] = 0; // clear count for length 0

        // smallest code value for each length
        uint[] nextCode = new uint[17];
        uint tempCode = 0;
        for (int bits = 1; bits <= 16; bits++) {
            tempCode = (tempCode + bitLengthCount[bits-1]) << 1;
            nextCode[bits] = tempCode;
        }

        uint[] code = new uint[MaxLiteralTreeElements];
        for (int i = 0; i < codeLengthArray.Length; i++) {
            int len = codeLengthArray[i];

            if (len > 0) {
                code[i] = FastEncoderStatics.BitReverse(nextCode[len], len);
                nextCode[len]++;
            }
        }
        return code;
    }

    // Builds the fast lookup table (for codes <= tableBits bits) and the
    // left/right overflow tree (for longer codes). Throws InvalidDataException
    // when the code lengths describe an invalid (oversubscribed) code.
    private void CreateTable() {
        uint[] codeArray = CalculateHuffmanCode();
        table = new short[ 1 << tableBits];
#if DEBUG
        codeArrayDebug = codeArray;
#endif

        // I need to find proof that left and right array will always be
        // enough. I think they are.
        left = new short[2* codeLengthArray.Length];
        right = new short[2* codeLengthArray.Length];
        short avail = (short)codeLengthArray.Length;  // next free tree-node index

        for (int ch = 0; ch < codeLengthArray.Length; ch++) {
            // length of this code
            int len = codeLengthArray[ch];
            if (len > 0) {
                // start value (bit reversed)
                int start = (int)codeArray[ch];

                if (len <= tableBits) {
                    // If a particular symbol is shorter than nine bits,
                    // then that symbol's translation is duplicated
                    // in all those entries that start with that symbol's bits.
                    // For example, if the symbol is four bits, then it's duplicated
                    // 32 times in a nine-bit table. If a symbol is nine bits long,
                    // it appears in the table once.
                    //
                    // Make sure that in the loop below, code is always
                    // less than table_size.
                    //
                    // On last iteration we store at array index:
                    //    initial_start_at + (locs-1)*increment
                    //  = initial_start_at + locs*increment - increment
                    //  = initial_start_at + (1 << tableBits) - increment
                    //  = initial_start_at + table_size - increment
                    //
                    // Therefore we must ensure:
                    //     initial_start_at + table_size - increment < table_size
                    // or: initial_start_at < increment
                    //
                    int increment = 1 << len;
                    if (start >= increment) {
                        throw new InvalidDataException(SR.GetString(SR.InvalidHuffmanData));
                    }

                    // Note the bits in the table are reversed.
                    int locs = 1 << (tableBits - len);
                    for (int j = 0; j < locs; j++) {
                        table[start] = (short) ch;
                        start += increment;
                    }
                } else {
                    // For any code which has length longer than num_elements,
                    // build a binary tree.

                    int overflowBits = len - tableBits;   // the nodes we need to represent the data.
                    int codeBitMask = 1 << tableBits;     // mask to get current bit (the bits can't fit in the table)

                    // the left, right table is used to represent the
                    // the rest bits. When we got the first part (number bits.) and look at
                    // the table, we will need to follow the tree to find the real character.
                    // This is in place to avoid bloating the table if there are
                    // a few ones with long code.
                    int index = start & ((1 << tableBits) -1);
                    short[] array = table;

                    do {
                        short value = array[index];

                        if (value == 0) { // set up next pointer if this node is not used before.
                            array[index] = (short)-avail; // use next available slot.
                            value = (short)-avail;
                            avail++;
                        }

                        if (value > 0) { // prevent an IndexOutOfRangeException from array[index]
                            throw new InvalidDataException(SR.GetString(SR.InvalidHuffmanData));
                        }

                        Debug.Assert( value < 0, "CreateTable: Only negative numbers are used for tree pointers!");

                        if ((start & codeBitMask) == 0) { // if current bit is 0, go change the left array
                            array = left;
                        } else { // if current bit is 1, set value in the right array
                            array = right;
                        }
                        index = -value; // go to next node

                        codeBitMask <<= 1;
                        overflowBits--;
                    } while (overflowBits != 0);

                    array[index] = (short) ch;
                }
            }
        }
    }

    //
    // This function will try to get enough bits from input and
    // try to decode the bits.
    // If there are not enough bits in the input, this function will return -1.
    //
    public int GetNextSymbol(InputBuffer input) {
        // Try to load 16 bits into input buffer if possible and get the bitBuffer value.
        // If there aren't 16 bits available we will return all we have in the
        // input buffer.
        uint bitBuffer = input.TryLoad16Bits();
        if( input.AvailableBits == 0) { // running out of input.
            return -1;
        }

        // decode an element
        int symbol = table[bitBuffer & tableMask];
        if( symbol < 0) { //  this will be the start of the binary tree
            // navigate the tree
            uint mask = (uint)1 << tableBits;
            do
            {
                symbol = -symbol;
                if ((bitBuffer & mask) == 0)
                    symbol = left[symbol];
                else
                    symbol = right[symbol];
                mask <<= 1;
            } while (symbol < 0);
        }

        int codeLength = codeLengthArray[symbol];

        // huffman code lengths must be at least 1 bit long
        if (codeLength <= 0)
        {
            throw new InvalidDataException(SR.GetString(SR.InvalidHuffmanData));
        }

        //
        // If this code is longer than the # bits we had in the bit buffer (i.e.
        // we read only part of the code), we can hit the entry in the table or the tree
        // for another symbol. However the length of another symbol will not match the
        // available bits count.
        if (codeLength > input.AvailableBits)
        {
            // We already tried to load 16 bits and maximum length is 15,
            // so this means we are running out of input.
            return -1;
        }

        input.SkipBits(codeLength);
        return symbol;
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 1fe9afcfe6ac39d4089fea8ee1a07813
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,10 @@
using System;
namespace Unity.IO.Compression {

    /// <summary>
    /// Abstraction over a deflate compressor used by DeflateStream:
    /// input is supplied in chunks and compressed output is drained
    /// into caller-provided buffers.
    /// </summary>
    internal interface IDeflater : IDisposable {
        /// <summary>True when the deflater can accept more input.</summary>
        bool NeedsInput();
        /// <summary>Supplies the next chunk of uncompressed input.</summary>
        void SetInput(byte[] inputBuffer, int startIndex, int count);
        /// <summary>Drains compressed output into outputBuffer; presumably
        /// returns the number of bytes written — confirm against implementations.</summary>
        int GetDeflateOutput(byte[] outputBuffer);
        /// <summary>Finalizes the compressed stream into outputBuffer;
        /// bytesRead receives the count written. NOTE(review): the meaning of
        /// the bool return is not visible here — verify against implementations.</summary>
        bool Finish(byte[] outputBuffer, out int bytesRead);
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e4bbb3e48f7e4f04ebe42577c871b0fc
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,645 @@
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// zlib.h -- interface of the 'zlib' general purpose compression library
// version 1.2.1, November 17th, 2003
//
// Copyright (C) 1995-2003 Jean-loup Gailly and Mark Adler
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
//
//
// ==--==
namespace Unity.IO.Compression
{
using System;
using System.Diagnostics;
internal class Inflater {
// const tables used in decoding:
// Extra bits for length code 257 - 285.
private static readonly byte[] extraLengthBits = {
0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
// The base length for length code 257 - 285.
// The formula to get the real length for a length code is lengthBase[code - 257] + (value stored in extraBits)
private static readonly int[] lengthBase = {
3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,258};
// The base distance for distance code 0 - 29
// The real distance for a distance code is distanceBasePosition[code] + (value stored in extraBits)
private static readonly int[] distanceBasePosition= {
1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,0,0};
// code lengths for code length alphabet is stored in following order
private static readonly byte[] codeOrder = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
private static readonly byte[] staticDistanceTreeTable = {
0x00,0x10,0x08,0x18,0x04,0x14,0x0c,0x1c,0x02,0x12,0x0a,0x1a,
0x06,0x16,0x0e,0x1e,0x01,0x11,0x09,0x19,0x05,0x15,0x0d,0x1d,
0x03,0x13,0x0b,0x1b,0x07,0x17,0x0f,0x1f,
};
private OutputWindow output;
private InputBuffer input;
HuffmanTree literalLengthTree;
HuffmanTree distanceTree;
InflaterState state;
bool hasFormatReader;
int bfinal;
BlockType blockType;
// uncompressed block
byte[] blockLengthBuffer = new byte[4];
int blockLength;
// compressed block
private int length;
private int distanceCode;
private int extraBits;
private int loopCounter;
private int literalLengthCodeCount;
private int distanceCodeCount;
private int codeLengthCodeCount;
private int codeArraySize;
private int lengthCode;
private byte[] codeList; // temporary array to store the code length for literal/Length and distance
private byte[] codeLengthTreeCodeLength;
HuffmanTree codeLengthTree;
IFileFormatReader formatReader; // class to decode header and footer (e.g. gzip)
public Inflater() {
output = new OutputWindow();
input = new InputBuffer();
codeList = new byte[HuffmanTree.MaxLiteralTreeElements + HuffmanTree.MaxDistTreeElements];
codeLengthTreeCodeLength = new byte[HuffmanTree.NumberOfCodeLengthTreeElements];
Reset();
}
internal void SetFileFormatReader(IFileFormatReader reader) {
formatReader = reader;
hasFormatReader = true;
Reset();
}
private void Reset() {
if ( hasFormatReader) {
state = InflaterState.ReadingHeader; // start by reading Header info
}
else {
state = InflaterState.ReadingBFinal; // start by reading BFinal bit
}
}
public void SetInput(byte[] inputBytes, int offset, int length) {
input.SetInput(inputBytes, offset, length); // append the bytes
}
public bool Finished() {
return (state == InflaterState.Done || state== InflaterState.VerifyingFooter);
}
public int AvailableOutput{
get {
return output.AvailableBytes;
}
}
public bool NeedsInput(){
return input.NeedsInput();
}
// Decompresses up to 'length' bytes into 'bytes' starting at 'offset'.
// First drains bytes already decoded into the output window, then keeps
// decoding until the caller's buffer is full, input runs dry, or the
// stream is finished. Returns the number of bytes written.
public int Inflate(byte[] bytes, int offset, int length) {
    // copy bytes from output to outputbytes if we have available bytes
    // if buffer is not filled up. keep decoding until no input are available
    // if decodeBlock returns false. Throw an exception.
    int count = 0;
    do
    {
        int copied = output.CopyTo(bytes, offset, length);
        if( copied > 0) {
            if( hasFormatReader) {
                // keep the format reader's running checksum/size in sync
                // with the bytes actually handed to the caller
                formatReader.UpdateWithBytesRead(bytes, offset, copied);
            }

            offset += copied;
            count += copied;
            length -= copied;
        }

        if (length == 0) {   // filled in the bytes array
            break;
        }
        // Decode will return false when more input is needed
    } while ( !Finished() && Decode());

    if( state == InflaterState.VerifyingFooter) {  // finished reading CRC
        // In this case finished is true and output window has all the data.
        // But some data in output window might not be copied out.
        if( output.AvailableBytes == 0) {
            formatReader.Validate();
        }
    }

    return count;
}
//Each block of compressed data begins with 3 header bits
// containing the following data:
// first bit BFINAL
// next 2 bits BTYPE
// Note that the header bits do not necessarily begin on a byte
// boundary, since a block does not necessarily occupy an integral
// number of bytes.
// BFINAL is set if and only if this is the last block of the data
// set.
// BTYPE specifies how the data are compressed, as follows:
// 00 - no compression
// 01 - compressed with fixed Huffman codes
// 10 - compressed with dynamic Huffman codes
// 11 - reserved (error)
// The only difference between the two compressed cases is how the
// Huffman codes for the literal/length and distance alphabets are
// defined.
//
// This function returns true for success (end of block reached or the output window is full),
// false if we are short of input
//
// Advances the inflater state machine by one step: reads the format
// header/footer when a format reader is attached, reads deflate block
// headers (BFINAL/BTYPE), and decodes block contents. Returns true on
// success (see comment above); false when more input is needed — the
// state machine resumes where it left off on the next call.
private bool Decode() {
    bool eob = false;
    bool result = false;

    if( Finished()) {
        return true;
    }

    if (hasFormatReader) {
        if (state == InflaterState.ReadingHeader) {
            if (!formatReader.ReadHeader(input)) {
                return false;
            }
            state = InflaterState.ReadingBFinal;
        }
        else if (state == InflaterState.StartReadingFooter || state == InflaterState.ReadingFooter) {
            if (!formatReader.ReadFooter(input))
                return false;

            state = InflaterState.VerifyingFooter;
            return true;
        }
    }

    if( state == InflaterState.ReadingBFinal) {   // reading bfinal bit
        // Need 1 bit
        if (!input.EnsureBitsAvailable(1))
            return false;

        bfinal = input.GetBits(1);
        state = InflaterState.ReadingBType;
    }

    if( state == InflaterState.ReadingBType) {
        // Need 2 bits
        if (!input.EnsureBitsAvailable(2)) {
            state = InflaterState.ReadingBType;
            return false;
        }

        blockType = (BlockType)input.GetBits(2);
        if (blockType == BlockType.Dynamic) {       // dynamic Huffman: code-length header must be read first
            state = InflaterState.ReadingNumLitCodes;
        }
        else if (blockType == BlockType.Static) {   // static Huffman: trees are fixed by the spec
            literalLengthTree = HuffmanTree.StaticLiteralLengthTree;
            distanceTree = HuffmanTree.StaticDistanceTree;
            state = InflaterState.DecodeTop;
        }
        else if (blockType == BlockType.Uncompressed) {
            state = InflaterState.UncompressedAligning;
        }
        else {
            throw new InvalidDataException(SR.GetString(SR.UnknownBlockType));
        }
    }

    // dispatch on the block type recorded above (possibly on an earlier call)
    if (blockType == BlockType.Dynamic) {
        if (state < InflaterState.DecodeTop) {   // we are reading the header
            result = DecodeDynamicBlockHeader();
        }
        else {
            result = DecodeBlock(out eob);       // this can returns true when output is full
        }
    }
    else if (blockType == BlockType.Static) {
        result = DecodeBlock(out eob);
    }
    else if (blockType == BlockType.Uncompressed) {
        result = DecodeUncompressedBlock(out eob);
    }
    else {
        throw new InvalidDataException(SR.GetString(SR.UnknownBlockType));
    }

    //
    // If we reached the end of the block and the block we were decoding had
    // bfinal=1 (final block)
    //
    if (eob && (bfinal != 0)) {
        if (hasFormatReader)
            state = InflaterState.StartReadingFooter;   // gzip: CRC/size footer follows the final block
        else
            state = InflaterState.Done;
    }
    return result;
}
// Format of Non-compressed blocks (BTYPE=00):
//
// Any bits of input up to the next byte boundary are ignored.
// The rest of the block consists of the following information:
//
// 0 1 2 3 4...
// +---+---+---+---+================================+
// | LEN | NLEN |... LEN bytes of literal data...|
// +---+---+---+---+================================+
//
// LEN is the number of data bytes in the block. NLEN is the
// one's complement of LEN.
// Decodes a stored (BTYPE=00) block: skips to a byte boundary, reads
// the 4-byte LEN/NLEN header, then copies LEN literal bytes straight
// into the output window. Sets end_of_block when the whole block has
// been copied. Returns true on end of block or when the output window
// is full; false when more input is needed.
bool DecodeUncompressedBlock(out bool end_of_block) {
    end_of_block = false;
    while(true) {
        switch( state) {
            case InflaterState.UncompressedAligning:   // intial state when calling this function
                // we must skip to a byte boundary
                input.SkipToByteBoundary();
                state = InflaterState.UncompressedByte1;
                goto case InflaterState.UncompressedByte1;

            case InflaterState.UncompressedByte1:      // decoding block length
            case InflaterState.UncompressedByte2:
            case InflaterState.UncompressedByte3:
            case InflaterState.UncompressedByte4:
                int bits = input.GetBits(8);
                if( bits < 0) {
                    return false;
                }

                // the four byte-states are consecutive enum values, so the
                // difference indexes the LEN/NLEN header buffer directly
                blockLengthBuffer[state - InflaterState.UncompressedByte1] = (byte)bits;
                if( state == InflaterState.UncompressedByte4) {
                    blockLength = blockLengthBuffer[0] + ((int)blockLengthBuffer[1]) * 256;
                    int blockLengthComplement= blockLengthBuffer[2] + ((int)blockLengthBuffer[3]) * 256;

                    // make sure complement matches
                    if ((ushort) blockLength != (ushort)(~blockLengthComplement)) {
                        throw new InvalidDataException(SR.GetString(SR.InvalidBlockLength));
                    }
                }

                // advance to the next header byte, or past Byte4 into
                // DecodingUncompressed
                state += 1;
                break;

            case InflaterState.DecodingUncompressed:   // copying block data
                // Directly copy bytes from input to output.
                int bytesCopied = output.CopyFrom(input, blockLength);
                blockLength -= bytesCopied;

                if (blockLength == 0) {
                    // Done with this block, need to re-init bit buffer for next block
                    state = InflaterState.ReadingBFinal;
                    end_of_block = true;
                    return true;
                }

                // We can fail to copy all bytes for two reasons:
                //    Running out of Input
                //    running out of free space in output window
                if(output.FreeBytes == 0) {
                    return true;
                }
                return false;

            default:
                Debug.Assert(false, "check why we are here!");
                throw new InvalidDataException(SR.GetString(SR.UnknownState));
        }
    }
}
// Decodes literals and length/distance pairs from a compressed (static
// or dynamic Huffman) block into the output window. Sets
// end_of_block_code_seen when the end-of-block symbol (256) is decoded.
// Returns true on end of block or when the output window is nearly
// full; false when more input is needed.
bool DecodeBlock(out bool end_of_block_code_seen) {
    end_of_block_code_seen = false;

    int freeBytes = output.FreeBytes;   // it is a little bit faster than frequently accessing the property
    while(freeBytes > 258) {
        // 258 means we can safely do decoding since maximum repeat length is 258
        int symbol;
        switch (state) {
            case InflaterState.DecodeTop:
                // decode an element from the literal tree
                symbol = literalLengthTree.GetNextSymbol(input);
                if( symbol < 0) {         // running out of input
                    return false;
                }

                if (symbol < 256) {       // literal
                    output.Write((byte)symbol);
                    --freeBytes;
                }
                else if( symbol == 256) { // end of block
                    end_of_block_code_seen = true;
                    // Reset state
                    state = InflaterState.ReadingBFinal;
                    return true;
                }
                else {                    // length/distance pair
                    symbol -= 257;        // length code started at 257
                    if( symbol < 8) {
                        symbol += 3;      // match length = 3,4,5,6,7,8,9,10
                        extraBits = 0;
                    }
                    else if( symbol == 28) { // extra bits for code 285 is 0
                        symbol = 258;        // code 285 means length 258
                        extraBits = 0;
                    }
                    else {
                        if( symbol < 0 || symbol >= extraLengthBits.Length ) {
                            throw new InvalidDataException(SR.GetString(SR.GenericInvalidData));
                        }
                        extraBits = extraLengthBits[symbol];
                        Debug.Assert(extraBits != 0, "We handle other cases seperately!");
                    }
                    length = symbol;
                    goto case InflaterState.HaveInitialLength;
                }
                break;

            case InflaterState.HaveInitialLength:
                if( extraBits > 0) {
                    // record where to resume if input runs out mid-read
                    state = InflaterState.HaveInitialLength;
                    int bits = input.GetBits(extraBits);
                    if( bits < 0) {
                        return false;
                    }

                    if( length < 0 || length >= lengthBase.Length ) {
                        throw new InvalidDataException(SR.GetString(SR.GenericInvalidData));
                    }
                    length = lengthBase[length] + bits;
                }
                state = InflaterState.HaveFullLength;
                goto case InflaterState.HaveFullLength;

            case InflaterState.HaveFullLength:
                if( blockType == BlockType.Dynamic) {
                    distanceCode = distanceTree.GetNextSymbol(input);
                }
                else {   // get distance code directly for static block
                    distanceCode = input.GetBits(5);
                    if( distanceCode >= 0 ) {
                        distanceCode = staticDistanceTreeTable[distanceCode];
                    }
                }

                if( distanceCode < 0) { // running out input
                    return false;
                }

                state = InflaterState.HaveDistCode;
                goto case InflaterState.HaveDistCode;

            case InflaterState.HaveDistCode:
                // To avoid a table lookup we note that for distanceCode >= 2,
                // extra_bits = (distanceCode-2) >> 1
                int offset;
                if( distanceCode > 3) {
                    extraBits = (distanceCode-2) >> 1;
                    int bits = input.GetBits(extraBits);
                    if( bits < 0 ) {
                        return false;
                    }
                    offset = distanceBasePosition[distanceCode] + bits;
                }
                else {
                    offset = distanceCode + 1;
                }

                Debug.Assert(freeBytes>= 258, "following operation is not safe!");
                output.WriteLengthDistance(length, offset);
                freeBytes -= length;
                state = InflaterState.DecodeTop;
                break;

            default:
                Debug.Assert(false, "check why we are here!");
                throw new InvalidDataException(SR.GetString(SR.UnknownState));
        }
    }
    return true;
}
// Format of the dynamic block header:
// 5 Bits: HLIT, # of Literal/Length codes - 257 (257 - 286)
// 5 Bits: HDIST, # of Distance codes - 1 (1 - 32)
// 4 Bits: HCLEN, # of Code Length codes - 4 (4 - 19)
//
// (HCLEN + 4) x 3 bits: code lengths for the code length
// alphabet given just above, in the order: 16, 17, 18,
// 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
//
// These code lengths are interpreted as 3-bit integers
// (0-7); as above, a code length of 0 means the
// corresponding symbol (literal/length or distance code
// length) is not used.
//
// HLIT + 257 code lengths for the literal/length alphabet,
// encoded using the code length Huffman code
//
// HDIST + 1 code lengths for the distance alphabet,
// encoded using the code length Huffman code
//
// The code length repeat codes can cross from HLIT + 257 to the
// HDIST + 1 code lengths. In other words, all code lengths form
// a single sequence of HLIT + HDIST + 258 values.
// Reads the dynamic block header (see the format comment above): the
// HLIT/HDIST/HCLEN counts, the code-length code lengths, and the
// run-length-encoded literal/length and distance code lengths; then
// builds the literal/length and distance Huffman trees. Returns true
// when the header is complete; false when more input is needed — the
// state machine resumes where it left off on the next call.
bool DecodeDynamicBlockHeader() {
    switch (state) {
        case InflaterState.ReadingNumLitCodes:
            literalLengthCodeCount = input.GetBits(5);
            if( literalLengthCodeCount < 0) {
                return false;
            }
            literalLengthCodeCount += 257;   // HLIT is transmitted minus 257
            state = InflaterState.ReadingNumDistCodes;
            goto case InflaterState.ReadingNumDistCodes;

        case InflaterState.ReadingNumDistCodes:
            distanceCodeCount = input.GetBits(5);
            if( distanceCodeCount < 0) {
                return false;
            }
            distanceCodeCount += 1;          // HDIST is transmitted minus 1
            state = InflaterState.ReadingNumCodeLengthCodes;
            goto case InflaterState.ReadingNumCodeLengthCodes;

        case InflaterState.ReadingNumCodeLengthCodes:
            codeLengthCodeCount = input.GetBits(4);
            if( codeLengthCodeCount < 0) {
                return false;
            }
            codeLengthCodeCount += 4;        // HCLEN is transmitted minus 4
            loopCounter = 0;
            state = InflaterState.ReadingCodeLengthCodes;
            goto case InflaterState.ReadingCodeLengthCodes;

        case InflaterState.ReadingCodeLengthCodes:
            // 3-bit lengths for the code-length alphabet, transmitted in
            // the scrambled order given by codeOrder
            while(loopCounter < codeLengthCodeCount) {
                int bits = input.GetBits(3);
                if( bits < 0) {
                    return false;
                }
                codeLengthTreeCodeLength[codeOrder[loopCounter]] = (byte)bits;
                ++loopCounter;
            }

            // lengths not transmitted are zero (symbol unused)
            for (int i = codeLengthCodeCount; i < codeOrder.Length; i++) {
                codeLengthTreeCodeLength[ codeOrder[i] ] = 0;
            }

            // create huffman tree for code length
            codeLengthTree = new HuffmanTree(codeLengthTreeCodeLength);
            codeArraySize = literalLengthCodeCount + distanceCodeCount;
            loopCounter = 0; // reset loop count

            state = InflaterState.ReadingTreeCodesBefore;
            goto case InflaterState.ReadingTreeCodesBefore;

        case InflaterState.ReadingTreeCodesBefore:
        case InflaterState.ReadingTreeCodesAfter:
            while (loopCounter < codeArraySize) {
                if( state == InflaterState.ReadingTreeCodesBefore) {
                    if( (lengthCode = codeLengthTree.GetNextSymbol(input)) < 0) {
                        return false;
                    }
                }

                // The alphabet for code lengths is as follows:
                //  0 - 15: Represent code lengths of 0 - 15
                //  16: Copy the previous code length 3 - 6 times.
                //  The next 2 bits indicate repeat length
                //         (0 = 3, ... , 3 = 6)
                //  Example:  Codes 8, 16 (+2 bits 11),
                //            16 (+2 bits 10) will expand to
                //            12 code lengths of 8 (1 + 6 + 5)
                //  17: Repeat a code length of 0 for 3 - 10 times.
                //    (3 bits of length)
                //  18: Repeat a code length of 0 for 11 - 138 times
                //    (7 bits of length)
                if (lengthCode <= 15) {
                    codeList[loopCounter++] = (byte)lengthCode;
                }
                else {
                    if( !input.EnsureBitsAvailable(7)) { // it doesn't matter if we require more bits here
                        state = InflaterState.ReadingTreeCodesAfter;
                        return false;
                    }

                    int repeatCount;
                    if (lengthCode == 16) {
                        if (loopCounter == 0) {   // can't have "prev code" on first code
                            throw new InvalidDataException();
                        }

                        byte previousCode = codeList[loopCounter-1];
                        repeatCount = input.GetBits(2) + 3;

                        if (loopCounter + repeatCount > codeArraySize) {
                            throw new InvalidDataException();
                        }

                        for (int j = 0; j < repeatCount; j++) {
                            codeList[loopCounter++] = previousCode;
                        }
                    }
                    else if (lengthCode == 17) {
                        repeatCount = input.GetBits(3) + 3;

                        if (loopCounter + repeatCount > codeArraySize) {
                            throw new InvalidDataException();
                        }

                        for (int j = 0; j < repeatCount; j++) {
                            codeList[loopCounter++] = 0;
                        }
                    }
                    else { // code == 18
                        repeatCount = input.GetBits(7) + 11;

                        if (loopCounter + repeatCount > codeArraySize) {
                            throw new InvalidDataException();
                        }

                        for (int j = 0; j < repeatCount; j++) {
                            codeList[loopCounter++] = 0;
                        }
                    }
                }

                state = InflaterState.ReadingTreeCodesBefore; // we want to read the next code.
            }
            break;

        default:
            Debug.Assert(false, "check why we are here!");
            throw new InvalidDataException(SR.GetString(SR.UnknownState));
    }

    byte[] literalTreeCodeLength = new byte[HuffmanTree.MaxLiteralTreeElements];
    byte[] distanceTreeCodeLength = new byte[HuffmanTree.MaxDistTreeElements];

    // Create literal and distance tables
    Array.Copy(codeList, literalTreeCodeLength, literalLengthCodeCount);
    Array.Copy(codeList, literalLengthCodeCount, distanceTreeCodeLength, 0, distanceCodeCount);

    // Make sure there is an end-of-block code, otherwise how could we ever end?
    if (literalTreeCodeLength[HuffmanTree.EndOfBlockCode] == 0) {
        throw new InvalidDataException();
    }

    literalLengthTree = new HuffmanTree(literalTreeCodeLength);
    distanceTree = new HuffmanTree(distanceTreeCodeLength);

    state = InflaterState.DecodeTop;
    return true;
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7fdb136adcd29954995bac65c7219b4d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,37 @@
namespace Unity.IO.Compression {

    // Do not rearrange the enum values: Inflater relies on them
    // arithmetically (e.g. 'state - UncompressedByte1' is used as a
    // buffer index and 'state < DecodeTop' distinguishes header states),
    // so the numeric order — including the gaps at 1 and 14 — is
    // significant.
    internal enum InflaterState {
        ReadingHeader = 0,            // Only applies to GZIP
        ReadingBFinal = 2,            // About to read bfinal bit
        ReadingBType = 3,             // About to read blockType bits

        ReadingNumLitCodes = 4,       // About to read # literal codes
        ReadingNumDistCodes = 5,      // About to read # dist codes
        ReadingNumCodeLengthCodes = 6, // About to read # code length codes
        ReadingCodeLengthCodes = 7,   // In the middle of reading the code length codes
        ReadingTreeCodesBefore = 8,   // In the middle of reading tree codes (loop top)
        ReadingTreeCodesAfter = 9,    // In the middle of reading tree codes (extension; code > 15)

        DecodeTop = 10,               // About to decode a literal (char/match) in a compressed block
        HaveInitialLength = 11,       // Decoding a match, have the literal code (base length)
        HaveFullLength = 12,          // Ditto, now have the full match length (incl. extra length bits)
        HaveDistCode = 13,            // Ditto, now have the distance code also, need extra dist bits

        /* uncompressed blocks */
        UncompressedAligning = 15,    // The four byte-states below must stay consecutive
        UncompressedByte1 = 16,
        UncompressedByte2 = 17,
        UncompressedByte3 = 18,
        UncompressedByte4 = 19,
        DecodingUncompressed = 20,

        // These three apply only to GZIP
        StartReadingFooter = 21,      // (Initialisation for reading footer)
        ReadingFooter = 22,
        VerifyingFooter = 23,

        Done = 24                     // Finished
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0fe75c6b962577e4bb4559bbb78af6eb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,184 @@
namespace Unity.IO.Compression
{
    using System;
    using System.Diagnostics;

    // A little-endian bit reader over a caller-supplied byte array.
    //
    // Bits are staged in the 32-bit 'bitBuffer' field and 'bitsInBuffer'
    // tracks how many of them are valid. Whenever the staged bits run
    // low, whole bytes are pulled from the array and shifted in above
    // the bits already present.
    //
    // The byte array is consumed from 'start' to 'end' and never reused;
    // once it is exhausted most read operations report failure (-1 or
    // false), meaning more input is required.
    internal class InputBuffer {
        private byte[] buffer;          // the bytes currently being consumed
        private int start;              // index of the next unread byte
        private int end;                // index one past the last valid byte
        private uint bitBuffer = 0;     // staged bits; the low bits are the oldest
        private int bitsInBuffer = 0;   // how many bits of bitBuffer are valid

        // Total bits available (staged bits only).
        public int AvailableBits {
            get { return bitsInBuffer; }
        }

        // Total bytes available (staged bits counted in units of 8).
        public int AvailableBytes {
            get { return (end - start) + (bitsInBuffer / 8); }
        }

        // Ensure that 'count' bits (1..16) are staged in the bit buffer.
        // Returns false when the input is exhausted before that happens.
        public bool EnsureBitsAvailable(int count) {
            Debug.Assert( 0 < count && count <= 16, "count is invalid.");

            // pull in one byte at a time until enough bits are staged;
            // since count <= 16 this loop runs at most twice
            while (bitsInBuffer < count) {
                if (NeedsInput()) {
                    return false;
                }
                bitBuffer |= (uint)buffer[start++] << bitsInBuffer;
                bitsInBuffer += 8;
            }
            return true;
        }

        // Try to stage at least 16 bits and return the raw bit buffer.
        // Unlike GetBits this never reports failure; callers must check
        // AvailableBits to learn how many bits are actually valid.
        public uint TryLoad16Bits() {
            while (bitsInBuffer < 16 && start < end) {
                bitBuffer |= (uint)buffer[start++] << bitsInBuffer;
                bitsInBuffer += 8;
            }
            return bitBuffer;
        }

        // Mask with the low 'count' bits set.
        private uint GetBitMask(int count) {
            return ((uint)1 << count) - 1;
        }

        // Removes and returns 'count' bits (1..16); -1 if input is short.
        public int GetBits(int count) {
            Debug.Assert( 0 < count && count <= 16, "count is invalid.");

            if ( !EnsureBitsAvailable(count) ) {
                return -1;
            }

            int result = (int)(bitBuffer & GetBitMask(count));
            bitBuffer >>= count;
            bitsInBuffer -= count;
            return result;
        }

        /// Copies up to 'length' bytes into output starting at
        /// output[offset]. The bit buffer must be byte aligned. Copies
        /// fewer bytes when input is short; returns the number of bytes
        /// actually copied (0 when none are available).
        public int CopyTo(byte[] output, int offset, int length) {
            Debug.Assert( output != null, "");
            Debug.Assert( offset >=0 , "");
            Debug.Assert( length >=0 , "");
            Debug.Assert( offset <= output.Length - length, "");
            Debug.Assert( (bitsInBuffer % 8) ==0, "");

            // drain whole bytes already staged in the bit buffer first
            int bytesFromBitBuffer = 0;
            while (bitsInBuffer > 0 && length > 0) {
                output[offset++] = (byte) bitBuffer;
                bitBuffer >>= 8;
                bitsInBuffer -= 8;
                length--;
                bytesFromBitBuffer++;
            }

            if (length == 0) {
                return bytesFromBitBuffer;
            }

            // then copy directly out of the backing array
            int remaining = end - start;
            if (length > remaining) {
                length = remaining;
            }

            Array.Copy(buffer, start, output, offset, length);
            start += length;
            return bytesFromBitBuffer + length;
        }

        // True when every byte of the current array has been consumed,
        // meaning the caller may supply more data via SetInput.
        public bool NeedsInput() {
            return start == end;
        }

        // Installs the next chunk of input; bits still staged in the bit
        // buffer are consumed before the new bytes. The array is NOT
        // copied (too expensive), so the caller must not modify it until
        // it has been fully consumed.
        public void SetInput(byte[] buffer, int offset, int length) {
            Debug.Assert( buffer != null, "");
            Debug.Assert( offset >=0 , "");
            Debug.Assert( length >=0 , "");
            Debug.Assert( offset <= buffer.Length - length, "");
            Debug.Assert( start == end, "");

            this.buffer = buffer;
            start = offset;
            end = offset + length;
        }

        // Discards n staged bits.
        public void SkipBits(int n) {
            Debug.Assert(bitsInBuffer >= n, "No enough bits in the buffer, Did you call EnsureBitsAvailable?");
            bitBuffer >>= n;
            bitsInBuffer -= n;
        }

        // Discards staged bits up to the next byte boundary.
        public void SkipToByteBoundary() {
            int partialBits = bitsInBuffer % 8;
            bitBuffer >>= partialBits;
            bitsInBuffer -= partialBits;
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5edc22ba82e12ce4aae6e16378803b53
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,34 @@
namespace Unity.IO.Compression {
    using System;
    using System.Runtime.Serialization;

    /// <summary>
    /// Thrown when a compressed stream contains malformed or corrupt
    /// data. Derives from SystemException on desktop profiles and from
    /// Exception on NETFX_CORE, where SystemException is unavailable.
    /// </summary>
#if !NETFX_CORE
    [Serializable]
#endif // !NETFX_CORE
    public sealed class InvalidDataException
#if NETFX_CORE
        : Exception
#else
        : SystemException
#endif
    {
        /// <summary>Creates the exception with the generic "Invalid data" message.</summary>
        public InvalidDataException ()
            : base(SR.GetString(SR.GenericInvalidData)) {
        }

        /// <summary>Creates the exception with a custom message.</summary>
        public InvalidDataException (String message)
            : base(message) {
        }

        /// <summary>Creates the exception with a custom message and inner exception.</summary>
        public InvalidDataException (String message, Exception innerException)
            : base(message, innerException) {
        }

#if !NETFX_CORE
        // Deserialization constructor, required for [Serializable].
        internal InvalidDataException (SerializationInfo info, StreamingContext context) : base(info, context) {
        }
#endif // !NETFX_CORE
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 78ff52880c8e2a74685c7981134cf0a9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,31 @@
namespace Unity.IO.Compression {

    // This class represents a match in the history window
    internal class Match {
        // What this match carries: a symbol, a match, or both.
        internal MatchState State { get; set; }

        // Position of the match within the history window.
        internal int Position { get; set; }

        // Length of the match in bytes.
        internal int Length { get; set; }

        // The literal symbol, when one is present.
        internal byte Symbol { get; set; }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ef38a6c88f4d4e740b955af34b455b6d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,5 @@
// Describes what a Match instance carries; HasSymbolAndMatch is the
// combination of the other two values.
internal enum MatchState {
    HasSymbol = 1,          // a literal symbol only
    HasMatch = 2,           // a length/position match only
    HasSymbolAndMatch = 3   // both a symbol and a match
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e1492d08233a4b5468e7b44128eb35ce
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,116 @@
namespace Unity.IO.Compression {
    using System;
    using System.Diagnostics;

    // Accumulates deflate output: whole bytes go straight into a
    // caller-supplied byte array while partial bit sequences are staged
    // in a 32-bit bit buffer and flushed in 16-bit chunks.
    internal class OutputBuffer {
        private byte[] byteBuffer;  // buffer for storing bytes
        private int pos;            // position
        private uint bitBuf;        // store uncomplete bits
        private int bitCount;       // number of bits in bitBuffer

        // set the output buffer we will be using
        internal void UpdateBuffer(byte[] output) {
            byteBuffer = output;
            pos = 0;
        }

        // Number of bytes written to the current buffer so far.
        internal int BytesWritten {
            get {
                return pos;
            }
        }

        // Remaining capacity of the current buffer in bytes.
        internal int FreeBytes {
            get {
                return byteBuffer.Length - pos;
            }
        }

        // Writes a 16-bit value in little-endian byte order.
        internal void WriteUInt16(ushort value) {
            // FIX: assert message said "No enough space" (typo'd and
            // inconsistent with WriteBytes); condition is unchanged.
            Debug.Assert(FreeBytes >= 2, "Not enough space in output buffer!");

            byteBuffer[pos++] = (byte)value;
            byteBuffer[pos++] = (byte)(value >> 8);
        }

        // Appends the low n bits (n <= 16) of 'bits' to the bit buffer,
        // flushing two bytes to the output whenever 16+ bits are staged.
        internal void WriteBits(int n, uint bits) {
            // FIX: original message read "length must be larger than 16!"
            // which contradicts the asserted condition n <= 16.
            Debug.Assert(n <= 16, "n must not be larger than 16!");
            bitBuf |= bits << bitCount;
            bitCount += n;
            if (bitCount >= 16) {
                Debug.Assert(byteBuffer.Length - pos >= 2, "Not enough space in output buffer!");
                byteBuffer[pos++] = unchecked((byte)bitBuf);
                byteBuffer[pos++] = unchecked((byte)(bitBuf >> 8));
                bitCount -= 16;
                bitBuf >>= 16;
            }
        }

        // write the bits left in the output as bytes.
        internal void FlushBits() {
            // flush bits from bit buffer to output buffer
            while (bitCount >= 8) {
                byteBuffer[pos++] = unchecked((byte)bitBuf);
                bitCount -= 8;
                bitBuf >>= 8;
            }

            // a trailing partial byte is padded with zero bits
            if (bitCount > 0) {
                byteBuffer[pos++] = unchecked((byte)bitBuf);
                bitBuf = 0;
                bitCount = 0;
            }
        }

        // Appends 'count' bytes from byteArray[offset..]; falls back to
        // bit-by-bit writing when the bit buffer is not byte aligned.
        internal void WriteBytes(byte[] byteArray, int offset, int count) {
            Debug.Assert(FreeBytes >= count, "Not enough space in output buffer!");
            // faster
            if (bitCount == 0) {
                Array.Copy(byteArray, offset, byteBuffer, pos, count);
                pos += count;
            }
            else {
                WriteBytesUnaligned(byteArray, offset, count);
            }
        }

        // Slow path: route each byte through the bit buffer.
        private void WriteBytesUnaligned(byte[] byteArray, int offset, int count) {
            for (int i = 0; i < count; i++) {
                byte b = byteArray[offset + i];
                WriteByteUnaligned(b);
            }
        }

        private void WriteByteUnaligned(byte b) {
            WriteBits(8, b);
        }

        // Conservative byte count the staged bits may occupy once
        // flushed: always reports at least 1 so that a partial byte is
        // accounted for (used by callers when budgeting free space).
        internal int BitsInBuffer {
            get {
                return (bitCount / 8) + 1;
            }
        }

        // Snapshots the writer position and staged bits so a caller can
        // speculatively emit output and roll back via RestoreState.
        internal OutputBuffer.BufferState DumpState() {
            OutputBuffer.BufferState savedState;
            savedState.pos = pos;
            savedState.bitBuf = bitBuf;
            savedState.bitCount = bitCount;
            return savedState;
        }

        // Rolls back to a state previously captured with DumpState.
        internal void RestoreState(OutputBuffer.BufferState state) {
            pos = state.pos;
            bitBuf = state.bitBuf;
            bitCount = state.bitCount;
        }

        // Plain value snapshot of the writer's mutable state.
        internal struct BufferState {
            internal int pos;       // position
            internal uint bitBuf;   // store uncomplete bits
            internal int bitCount;  // number of bits in bitBuffer
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e3704840b8764dd40b737804261e847c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,129 @@
namespace Unity.IO.Compression
{
    using System;
    using System.Diagnostics;
    using System.Globalization;

    // This class maintains a window for decompressed output.
    // We need to keep this because the decompressed information can be
    // a literal or a length/distance pair. For length/distance pair,
    // we need to look back in the output window and copy bytes from there.
    // We use a byte array of WindowSize circularly.
    internal class OutputWindow {
        private const int WindowSize = 32768;  // 2^15 bytes
        private const int WindowMask = 32767;  // WindowSize - 1, for cheap wrap-around via '&'

        private byte[] window = new byte[WindowSize]; //The window is 2^15 bytes
        private int end;       // this is the position to where we should write next byte
        private int bytesUsed; // The number of bytes in the output window which is not consumed.

        // Add a byte to output window
        public void Write(byte b) {
            Debug.Assert(bytesUsed < WindowSize, "Can't add byte when window is full!");
            window[end++] = b;
            end &= WindowMask;
            ++bytesUsed;
        }

        // Copies 'length' bytes starting 'distance' bytes back in the
        // already-produced output; the source range may overlap the
        // write position, per the deflate spec.
        public void WriteLengthDistance(int length, int distance) {
            Debug.Assert((bytesUsed + length) <= WindowSize, "Not enough space");

            // move backwards distance bytes in the output stream,
            // and copy length bytes from this position to the output stream.
            bytesUsed += length;
            int copyStart = (end - distance) & WindowMask;  // start position for coping.

            // fast path: neither source nor destination wraps the window
            int border = WindowSize - length;
            if (copyStart <= border && end < border) {
                if (length <= distance) {
                    System.Array.Copy(window, copyStart, window, end, length);
                    end += length;
                }
                else {
                    // The referenced string may overlap the current
                    // position; for example, if the last 2 bytes decoded have values
                    // X and Y, a string reference with <length = 5, distance = 2>
                    // adds X,Y,X,Y,X to the output stream.
                    while (length-- > 0) {
                        window[end++] = window[copyStart++];
                    }
                }
            }
            else { // copy byte by byte
                while (length-- > 0) {
                    window[end++] = window[copyStart++];
                    end &= WindowMask;
                    copyStart &= WindowMask;
                }
            }
        }

        // Copy up to length of bytes from input directly.
        // This is used for uncompressed block.
        public int CopyFrom(InputBuffer input, int length) {
            // clamp to what the window can hold and the input can supply
            length = Math.Min(Math.Min(length, WindowSize - bytesUsed), input.AvailableBytes);
            int copied;

            // We might need wrap around to copy all bytes.
            int tailLen = WindowSize - end;
            if (length > tailLen) {
                // copy the first part
                copied = input.CopyTo(window, end, tailLen);
                if (copied == tailLen) {
                    // only try to copy the second part if we have enough bytes in input
                    copied += input.CopyTo(window, 0, length - tailLen);
                }
            }
            else {
                // only one copy is needed if there is no wrap around.
                copied = input.CopyTo(window, end, length);
            }

            end = (end + copied) & WindowMask;
            bytesUsed += copied;
            return copied;
        }

        // Free space in output window
        public int FreeBytes {
            get {
                return WindowSize - bytesUsed;
            }
        }

        // bytes not consumed in output window
        public int AvailableBytes {
            get {
                return bytesUsed;
            }
        }

        // copy the decompressed bytes to output array.
        public int CopyTo(byte[] output, int offset, int length) {
            int copy_end;

            if (length > bytesUsed) {   // we can copy all the decompressed bytes out
                copy_end = end;
                length = bytesUsed;
            }
            else {
                copy_end = (end - bytesUsed + length) & WindowMask;  // copy length of bytes
            }

            int copied = length;

            // when the requested range wraps past the start of the
            // circular buffer we need to copy two parts seperately
            int tailLen = length - copy_end;
            if ( tailLen > 0) {
                // copy tailLen bytes from the end of output window
                System.Array.Copy(window, WindowSize - tailLen,
                                  output, offset, tailLen);
                offset += tailLen;
                length = copy_end;
            }
            System.Array.Copy(window, copy_end - length, output, offset, length);
            bytesUsed -= copied;
            Debug.Assert(bytesUsed >= 0, "check this function and find why we copied more bytes than we have");
            return copied;
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f334e9704b79a394b9b21ca7d6a12b1f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,10 @@
# Unity.IO.Compression
This is a port of Microsoft's code from [here](https://github.com/Microsoft/referencesource/tree/master/System/sys/system/IO/compression).
The classes in System.IO.Compression in Unity 4.x [do not seem to work on Windows](http://answers.unity3d.com/questions/692250/gzipstream-and-deflatestream-give-entrypointnotfou.html) and perhaps several other platforms.
Luckily, Microsoft has released much of the source code of the .NET BCL. We have ported Microsoft's code to work in Unity. This seems like the cleanest and most stable way to get the GZipStream and DeflateStream classes working in Unity.
Find the plugin on the Unity Asset Store [here](https://www.assetstore.unity3d.com/#!/content/31902).
Built by [Hitcents](http://hitcents.com/), contact us [here](http://hitcents.com/contact) for questions.

View File

@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 6ea4af6e593d40d4dbbf149f22bf9e7f
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,40 @@
// Copyright (c) Hitcents
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace Unity.IO.Compression {

    /// <summary>
    /// Stand-in for the BCL's internal SR (string resources) class.
    /// Unity builds do not localize exception messages, so every
    /// "resource key" below is simply the final message text itself.
    /// </summary>
    internal class SR
    {
        // Message constants, kept sorted alphabetically.
        public const string ArgumentOutOfRange_Enum = "Argument out of range";
        public const string CannotReadFromDeflateStream = "Cannot read from deflate stream";
        public const string CannotWriteToDeflateStream = "Cannot write to deflate stream";
        public const string CorruptedGZipHeader = "Corrupted gzip header";
        public const string GenericInvalidData = "Invalid data";
        public const string InvalidArgumentOffsetCount = "Invalid argument offset count";
        public const string InvalidBeginCall = "Invalid begin call";
        public const string InvalidBlockLength = "Invalid block length";
        public const string InvalidCRC = "Invalid CRC";
        public const string InvalidEndCall = "Invalid end call";
        public const string InvalidHuffmanData = "Invalid Huffman data";
        public const string InvalidStreamSize = "Invalid stream size";
        public const string NotReadableStream = "Not a readable stream";
        public const string NotSupported = "Not supported";
        public const string NotWriteableStream = "Not a writeable stream";
        public const string ObjectDisposed_StreamClosed = "Object disposed";
        public const string UnknownBlockType = "Unknown block type";
        public const string UnknownCompressionMode = "Unknown compression mode";
        public const string UnknownState = "Unknown state";

        // Not instantiable: SR is a pure constant holder.
        private SR()
        {
        }

        // "Localizes" a message: since no localization is performed the
        // input string is returned unchanged.
        internal static string GetString(string p)
        {
            return p;
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6ba2bb8eed28cba4eb48ad0d624fd824
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: