From 7f1efa17cb91049f78b58e4826f9837e58a5b293 Mon Sep 17 00:00:00 2001 From: rom1504 Date: Tue, 31 Mar 2026 05:52:32 +0000 Subject: [PATCH] Use sync zlib in packet compression to avoid uncaught errors MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Node's async zlib (deflate/unzip) creates internal C++ Zlib handles that run on the libuv thread pool. When a client disconnects while decompression is in progress, the C++ callback fires after the JS error listener is removed, causing an uncaught exception: Uncaught Error: unexpected end of file at Zlib.zlibOnError [as onerror] This is a known class of Node.js issues — see: - nodejs/node#62325 (use-after-free fix for reset during write) - nodejs/node#61202 (zlib stream corruption with multiple instances) - nodejs/node#43868 (uncaught zlib exception in fetch) Switch to deflateSync/unzipSync, which complete synchronously with no lingering C++ handles. Each call processes one MC packet (~few KB), so the event loop impact is negligible. Also wrap the gunzipSync call in minecraft.js NBT parsing in a try/catch. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- src/datatypes/minecraft.js | 7 ++++++- src/transforms/compression.js | 32 +++++++++++++++++--------------- 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/src/datatypes/minecraft.js b/src/datatypes/minecraft.js index 83ab50e8a..1b6161862 100644 --- a/src/datatypes/minecraft.js +++ b/src/datatypes/minecraft.js @@ -56,7 +56,12 @@ function readCompressedNbt (buffer, offset) { const compressedNbt = buffer.slice(offset + 2, offset + 2 + length) - const nbtBuffer = zlib.gunzipSync(compressedNbt) // TODO: async + let nbtBuffer + try { + nbtBuffer = zlib.gunzipSync(compressedNbt) // TODO: async + } catch (err) { + throw new PartialReadError('zlib decompress failed: ' + err.message) + } const results = nbt.proto.read(nbtBuffer, 0, 'nbt') return { diff --git a/src/transforms/compression.js b/src/transforms/compression.js index 45b063522..718e7fed9 100644 --- a/src/transforms/compression.js +++ b/src/transforms/compression.js @@ -20,14 +20,16 @@ class Compressor extends Transform { _transform (chunk, enc, cb) { if (chunk.length >= this.compressionThreshold) { - zlib.deflate(chunk, (err, newChunk) => { - if (err) { return cb(err) } + try { + const newChunk = zlib.deflateSync(chunk) const buf = Buffer.alloc(sizeOfVarInt(chunk.length) + newChunk.length) const offset = writeVarInt(chunk.length, buf, 0) newChunk.copy(buf, offset) this.push(buf) return cb() - }) + } catch (err) { + return cb(err) + } } else { const buf = Buffer.alloc(sizeOfVarInt(0) + chunk.length) const offset = writeVarInt(0, buf, 0) @@ -52,23 +54,23 @@ class Decompressor extends Transform { this.push(chunk.slice(size)) return cb() } else { - zlib.unzip(chunk.slice(size), { finishFlush: 2 /* Z_SYNC_FLUSH = 2, but when using Browserify/Webpack it doesn't exist */ }, (err, newBuf) => { /** Fix by lefela4. 
*/ - if (err) { - if (!this.hideErrors) { - console.error('problem inflating chunk') - console.error('uncompressed length ' + value) - console.error('compressed length ' + chunk.length) - console.error('hex ' + chunk.toString('hex')) - console.log(err) - } - return cb() - } + try { + const newBuf = zlib.unzipSync(chunk.slice(size), { finishFlush: 2 }) if (newBuf.length !== value && !this.hideErrors) { console.error('uncompressed length should be ' + value + ' but is ' + newBuf.length) } this.push(newBuf) return cb() - }) + } catch (err) { + if (!this.hideErrors) { + console.error('problem inflating chunk') + console.error('uncompressed length ' + value) + console.error('compressed length ' + chunk.length) + console.error('hex ' + chunk.toString('hex')) + console.log(err) + } + return cb() + } } } }