Commit ec791200 authored by Michael Droettboom

Fix LZ4 decoding bug

This was introduced in #170, but wasn't caught because emsdk didn't get rebuilt.
parent 5fb84895
@@ -53,7 +53,7 @@ jobs:
       - restore_cache:
           keys:
-            - v1-emsdk-{{ checksum "emsdk/Makefile" }}-v6-
+            - v1-emsdk-{{ checksum "emsdk/Makefile" }}-v7-
       - run:
           name: build
@@ -68,7 +68,7 @@ jobs:
           paths:
             - ./emsdk/emsdk
             - ~/.ccache
-          key: v1-emsdk-{{ checksum "emsdk/Makefile" }}-v6-{{ .BuildNum }}
+          key: v1-emsdk-{{ checksum "emsdk/Makefile" }}-v7-{{ .BuildNum }}
       - persist_to_workspace:
           root: .
@@ -52,7 +52,7 @@ index 4c3f583b7..5291002a4 100644
         var contents = stream.node.contents;
         var compressedData = contents.compressedData;
         var written = 0;
-@@ -122,6 +117,8 @@ mergeInto(LibraryManager.library, {
+@@ -122,11 +117,14 @@ mergeInto(LibraryManager.library, {
         var chunkIndex = Math.floor(start / LZ4.CHUNK_SIZE);
         var compressedStart = compressedData.offsets[chunkIndex];
         var compressedSize = compressedData.sizes[chunkIndex];
@@ -61,7 +61,13 @@ index 4c3f583b7..5291002a4 100644
         var currChunk;
         if (compressedData.successes[chunkIndex]) {
           var found = compressedData.cachedIndexes.indexOf(chunkIndex);
-@@ -138,18 +135,19 @@ mergeInto(LibraryManager.library, {
+          if (found >= 0) {
+            currChunk = compressedData.cachedChunks[found];
++           buffer.set(Module.HEAPU8.subarray(currChunk + startInChunk, currChunk + endInChunk), offset + written);
+          } else {
+            // decompress the chunk
+            compressedData.cachedIndexes.pop();
+@@ -138,18 +136,19 @@ mergeInto(LibraryManager.library, {
         Module['decompressedChunks'] = (Module['decompressedChunks'] || 0) + 1;
       }
       var compressed = compressedData.data.subarray(compressedStart, compressedStart + compressedSize);
@@ -89,7 +95,7 @@ index 4c3f583b7..5291002a4 100644
       var currWritten = endInChunk - startInChunk;
       written += currWritten;
     }
-@@ -181,4 +179,3 @@ if (LibraryManager.library['$FS__deps']) {
+@@ -181,4 +180,3 @@ if (LibraryManager.library['$FS__deps']) {
       warn('FS does not seem to be in use (no preloaded files etc.), LZ4 will not do anything');
     }
  #endif
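For context, below is a minimal, self-contained sketch of the read loop this patch touches. It is not the actual library_lz4.js code: CHUNK_SIZE, CACHE_SLOTS, decompressChunk, readCompressed and the plain Uint8Array chunks are illustrative stand-ins (the real code works with pointers into Module.HEAPU8, and the fix above adds the buffer.set call inside the cache-hit branch; in this simplified sketch the copy is written once after both branches). As far as the diff shows, the bug was that the cache-hit path picked up the cached decompressed chunk but never copied its bytes into the caller's buffer.

// Sketch only -- simplified stand-in for the LZ4 read path, not the real API.
const CHUNK_SIZE = 2048;   // decompressed chunk size (assumed value)
const CACHE_SLOTS = 4;     // number of decompressed chunks kept cached (assumed)

// Stand-in for LZ4 decompression: here the "compressed" data is stored as
// plain chunks, so "decompressing" is just a copy.
function decompressChunk(compressedData, chunkIndex) {
  return compressedData.chunks[chunkIndex].slice();
}

// Read `length` bytes starting at `position` into `buffer` at `offset`.
// End-of-file clamping is omitted for brevity.
function readCompressed(compressedData, buffer, offset, length, position) {
  let written = 0;
  while (written < length) {
    const start = position + written;
    const chunkIndex = Math.floor(start / CHUNK_SIZE);
    const startInChunk = start % CHUNK_SIZE;
    const endInChunk = Math.min(startInChunk + (length - written), CHUNK_SIZE);

    let currChunk;
    const found = compressedData.cachedIndexes.indexOf(chunkIndex);
    if (found >= 0) {
      // Cache hit: the chunk is already decompressed.  The bug fixed by this
      // commit was that this branch did not copy the cached bytes into
      // `buffer`; the copy below now runs for both branches.
      currChunk = compressedData.cachedChunks[found];
    } else {
      // Cache miss: evict the oldest cached chunk, decompress this one and
      // cache it (mirrors the cachedIndexes.pop()/unshift() in the real code).
      if (compressedData.cachedIndexes.length >= CACHE_SLOTS) {
        compressedData.cachedIndexes.pop();
        compressedData.cachedChunks.pop();
      }
      currChunk = decompressChunk(compressedData, chunkIndex);
      compressedData.cachedIndexes.unshift(chunkIndex);
      compressedData.cachedChunks.unshift(currChunk);
    }

    // Copy the requested slice of the decompressed chunk into the output.
    buffer.set(currChunk.subarray(startInChunk, endInChunk), offset + written);
    written += endInChunk - startInChunk;
  }
  return written;
}

Calling readCompressed twice over the same byte range exercises first the cache-miss path and then the cache-hit path; with the pre-fix behaviour the second call would have returned without filling the output buffer.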