From 43f025c8f2e740ed03860c3f759a62a1bacc205d Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Fri, 15 Aug 2025 19:40:06 +0200 Subject: [PATCH 01/22] Make CircularBuffer part of the public API by moving it to the utils package --- .../archivers/zip/ExplodingInputStream.java | 1 + .../zip => utils}/CircularBuffer.java | 30 +- .../archivers/zip/CircularBufferTest.java | 82 ----- .../compress/utils/CircularBufferTest.java | 312 ++++++++++++++++++ 4 files changed, 336 insertions(+), 89 deletions(-) rename src/main/java/org/apache/commons/compress/{archivers/zip => utils}/CircularBuffer.java (77%) delete mode 100644 src/test/java/org/apache/commons/compress/archivers/zip/CircularBufferTest.java create mode 100644 src/test/java/org/apache/commons/compress/utils/CircularBufferTest.java diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ExplodingInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ExplodingInputStream.java index b03798ab76d..08f98fb7852 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ExplodingInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ExplodingInputStream.java @@ -23,6 +23,7 @@ import java.io.InputStream; import org.apache.commons.compress.archivers.ArchiveException; +import org.apache.commons.compress.utils.CircularBuffer; import org.apache.commons.compress.utils.InputStreamStatistics; import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.io.input.CloseShieldInputStream; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/CircularBuffer.java b/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java similarity index 77% rename from src/main/java/org/apache/commons/compress/archivers/zip/CircularBuffer.java rename to src/main/java/org/apache/commons/compress/utils/CircularBuffer.java index 54784052d27..7f457640c51 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/CircularBuffer.java 
+++ b/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java @@ -17,14 +17,14 @@ * under the License. */ -package org.apache.commons.compress.archivers.zip; +package org.apache.commons.compress.utils; /** * Circular byte buffer. * - * @since 1.7 + * @since 1.29 */ -final class CircularBuffer { +public class CircularBuffer { /** Size of the buffer */ private final int size; @@ -38,9 +38,12 @@ final class CircularBuffer { /** Index of the next data written in the buffer */ private int writeIndex; - CircularBuffer(final int size) { + private int bytesAvailable; + + public CircularBuffer(final int size) { this.size = size; buffer = new byte[size]; + bytesAvailable = 0; } /** @@ -49,7 +52,7 @@ final class CircularBuffer { * @return Whether a new byte can be read from the buffer. */ public boolean available() { - return readIndex != writeIndex; + return bytesAvailable > 0; } /** @@ -59,11 +62,18 @@ public boolean available() { * @param length the number of bytes to copy */ public void copy(final int distance, final int length) { + if (distance < 1) { + throw new IllegalArgumentException("Distance must be at least 1"); + } + + if (distance > size) { + throw new IllegalArgumentException("Distance exceeds buffer size"); + } + final int pos1 = writeIndex - distance; final int pos2 = pos1 + length; for (int i = pos1; i < pos2; i++) { - buffer[writeIndex] = buffer[(i + size) % size]; - writeIndex = (writeIndex + 1) % size; + put(buffer[(i + size) % size]); } } @@ -76,6 +86,7 @@ public int get() { if (available()) { final int value = buffer[readIndex]; readIndex = (readIndex + 1) % size; + bytesAvailable--; return value & 0xFF; } return -1; @@ -87,7 +98,12 @@ public int get() { * @param value the value to put. 
*/ public void put(final int value) { + if(bytesAvailable == size) { + throw new IllegalStateException("Buffer overflow: Cannot write to a full buffer"); + } + buffer[writeIndex] = (byte) value; writeIndex = (writeIndex + 1) % size; + bytesAvailable++; } } diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/CircularBufferTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/CircularBufferTest.java deleted file mode 100644 index e5656d00cf2..00000000000 --- a/src/test/java/org/apache/commons/compress/archivers/zip/CircularBufferTest.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.commons.compress.archivers.zip; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import org.junit.jupiter.api.Test; - -class CircularBufferTest { - - @Test - void testCopy() { - final CircularBuffer buffer = new CircularBuffer(16); - - buffer.put(1); - buffer.put(2); - buffer.get(); - buffer.get(); - - // copy uninitialized data - buffer.copy(6, 8); - - for (int i = 2; i < 6; i++) { - assertEquals(0, buffer.get(), "buffer[" + i + "]"); - } - assertEquals(1, buffer.get(), "buffer[" + 6 + "]"); - assertEquals(2, buffer.get(), "buffer[" + 7 + "]"); - assertEquals(0, buffer.get(), "buffer[" + 8 + "]"); - assertEquals(0, buffer.get(), "buffer[" + 9 + "]"); - - for (int i = 10; i < 14; i++) { - buffer.put(i); - buffer.get(); - } - - assertFalse(buffer.available(), "available"); - - // copy data and wrap - buffer.copy(2, 8); - - for (int i = 14; i < 18; i++) { - assertEquals(i % 2 == 0 ? 12 : 13, buffer.get(), "buffer[" + i + "]"); - } - } - - @Test - void testPutAndGet() { - final int size = 16; - final CircularBuffer buffer = new CircularBuffer(size); - for (int i = 0; i < size / 2; i++) { - buffer.put(i); - } - - assertTrue(buffer.available(), "available"); - - for (int i = 0; i < size / 2; i++) { - assertEquals(i, buffer.get(), "buffer[" + i + "]"); - } - - assertEquals(-1, buffer.get()); - assertFalse(buffer.available(), "available"); - } -} diff --git a/src/test/java/org/apache/commons/compress/utils/CircularBufferTest.java b/src/test/java/org/apache/commons/compress/utils/CircularBufferTest.java new file mode 100644 index 00000000000..a83939cca8b --- /dev/null +++ b/src/test/java/org/apache/commons/compress/utils/CircularBufferTest.java @@ -0,0 +1,312 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import org.junit.jupiter.api.Test; + +class CircularBufferTest { + @Test + void testPutAndGet1() { + final int size = 16; + final CircularBuffer buffer = new CircularBuffer(size); + for (int i = 0; i < size / 2; i++) { + buffer.put(i); + } + + assertTrue(buffer.available(), "available"); + + for (int i = 0; i < size / 2; i++) { + assertEquals(i, buffer.get(), "buffer[" + i + "]"); + } + + assertEquals(-1, buffer.get()); + assertFalse(buffer.available(), "available"); + } + + @Test + void testPutAndGet2() { + CircularBuffer buffer = new CircularBuffer(8); + + // Nothing to read + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + + // Write a byte and read it + buffer.put(0x01); + assertTrue(buffer.available()); + assertEquals(0x01, buffer.get()); + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + + // Write multiple bytes and read them + buffer.put(0x02); + buffer.put(0x03); + buffer.put(0x04); + assertTrue(buffer.available()); + assertEquals(0x02, 
buffer.get()); + assertEquals(0x03, buffer.get()); + assertEquals(0x04, buffer.get()); + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + } + + @Test + void testPutAndGetWrappingAround() { + CircularBuffer buffer = new CircularBuffer(4); + + // Nothing to read + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + + // Write two bytes and read them in a loop making the buffer wrap around several times + for (int i=0;i<8;i++) { + buffer.put(i * 2); + buffer.put(i * 2 + 1); + + assertTrue(buffer.available()); + assertEquals(i * 2, buffer.get()); + assertEquals(i * 2 + 1, buffer.get()); + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + } + } + + @Test + void testPutOverflow() { + CircularBuffer buffer = new CircularBuffer(4); + + // Write more bytes than the buffer can hold + buffer.put(0x01); + buffer.put(0x02); + buffer.put(0x03); + buffer.put(0x04); + + try { + buffer.put(0x05); + fail("Expected IllegalStateException for buffer overflow"); + } catch (IllegalStateException e) { + assertEquals("Buffer overflow: Cannot write to a full buffer", e.getMessage()); + } + } + + @Test + void testCopy1() { + final CircularBuffer buffer = new CircularBuffer(16); + + buffer.put(1); + buffer.put(2); + buffer.get(); + buffer.get(); + + // copy uninitialized data + buffer.copy(6, 8); + + for (int i = 2; i < 6; i++) { + assertEquals(0, buffer.get(), "buffer[" + i + "]"); + } + assertEquals(1, buffer.get(), "buffer[" + 6 + "]"); + assertEquals(2, buffer.get(), "buffer[" + 7 + "]"); + assertEquals(0, buffer.get(), "buffer[" + 8 + "]"); + assertEquals(0, buffer.get(), "buffer[" + 9 + "]"); + + for (int i = 10; i < 14; i++) { + buffer.put(i); + buffer.get(); + } + + assertFalse(buffer.available(), "available"); + + // copy data and wrap + buffer.copy(2, 8); + + for (int i = 14; i < 18; i++) { + assertEquals(i % 2 == 0 ? 
12 : 13, buffer.get(), "buffer[" + i + "]"); + } + } + + @Test + void testCopy2() { + CircularBuffer buffer = new CircularBuffer(16); + + // Write some bytes + buffer.put(0x01); + buffer.put(0x02); + buffer.put(0x03); + buffer.put(0x04); + + buffer.copy(2, 2); // Copy last two bytes (0x03, 0x04) + + assertEquals(0x01, buffer.get()); + assertEquals(0x02, buffer.get()); + assertEquals(0x03, buffer.get()); + assertEquals(0x04, buffer.get()); + assertEquals(0x03, buffer.get()); + assertEquals(0x04, buffer.get()); + + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + } + + @Test + void testCopy3() { + CircularBuffer buffer = new CircularBuffer(16); + + // Write some bytes + buffer.put(0x01); + buffer.put(0x02); + buffer.put(0x03); + buffer.put(0x04); + + buffer.copy(4, 2); // Copy first two bytes (0x01, 0x02) + + assertEquals(0x01, buffer.get()); + assertEquals(0x02, buffer.get()); + assertEquals(0x03, buffer.get()); + assertEquals(0x04, buffer.get()); + assertEquals(0x01, buffer.get()); // Copied byte + assertEquals(0x02, buffer.get()); // Copied byte + + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + } + + @Test + void testCopy4() { + CircularBuffer buffer = new CircularBuffer(6); + + // Write some bytes + buffer.put(0x01); + buffer.put(0x02); + buffer.put(0x03); + buffer.put(0x04); + buffer.put(0x05); + buffer.put(0x06); + + // Read four bytes to make space + assertEquals(0x01, buffer.get()); + assertEquals(0x02, buffer.get()); + assertEquals(0x03, buffer.get()); + assertEquals(0x04, buffer.get()); + + // Write two more bytes and making the buffer wrap around + buffer.put(0x07); + buffer.put(0x08); + + buffer.copy(3, 2); // Copy two bytes from 3 bytes ago (0x06, 0x07) where the buffer wraps around + + // Read rest of the buffer + assertEquals(0x05, buffer.get()); + assertEquals(0x06, buffer.get()); + assertEquals(0x07, buffer.get()); + assertEquals(0x08, buffer.get()); + assertEquals(0x06, buffer.get()); // Copied byte + 
assertEquals(0x07, buffer.get()); // Copied byte + + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + } + + @Test + void testCopyRunLengthEncoding1() { + CircularBuffer buffer = new CircularBuffer(16); + + // Write two bytes + buffer.put(0x01); + buffer.put(0x02); + + buffer.copy(1, 8); // Copy last byte (0x02) eight times + + // Read the buffer + assertEquals(0x01, buffer.get()); + assertEquals(0x02, buffer.get()); + assertEquals(0x02, buffer.get()); // Copied byte 1 + assertEquals(0x02, buffer.get()); // Copied byte 2 + assertEquals(0x02, buffer.get()); // Copied byte 3 + assertEquals(0x02, buffer.get()); // Copied byte 4 + assertEquals(0x02, buffer.get()); // Copied byte 5 + assertEquals(0x02, buffer.get()); // Copied byte 6 + assertEquals(0x02, buffer.get()); // Copied byte 7 + assertEquals(0x02, buffer.get()); // Copied byte 8 + + assertFalse(buffer.available()); + assertEquals(-1, buffer.get()); + } + + @Test + void testCopyDistanceInvalid() { + CircularBuffer buffer = new CircularBuffer(4); + + // Write some bytes + buffer.put(0x01); + buffer.put(0x02); + + try { + buffer.copy(0, 2); // Try to copy from distance 0 + fail("Expected IllegalArgumentException for invalid distance"); + } catch (IllegalArgumentException e) { + assertEquals("Distance must be at least 1", e.getMessage()); + } + } + + @Test + void testCopyDistanceExceedingBufferSize() { + CircularBuffer buffer = new CircularBuffer(4); + + // Write some bytes + buffer.put(0x01); + buffer.put(0x02); + buffer.put(0x03); + buffer.put(0x04); + + try { + buffer.copy(5, 2); // Try to copy from a distance that is bigger than the buffer size + fail("Expected IllegalArgumentException for distance exceeding buffer size"); + } catch (IllegalArgumentException e) { + assertEquals("Distance exceeds buffer size", e.getMessage()); + } + } + + @Test + void testCopyCausingBufferOverflow() { + CircularBuffer buffer = new CircularBuffer(4); + + // Write some bytes + buffer.put(0x01); + 
buffer.put(0x02); + buffer.put(0x03); + buffer.put(0x04); + + // Read some bytes to make space + assertEquals(0x01, buffer.get()); + assertEquals(0x02, buffer.get()); + + try { + buffer.copy(4, 4); // Copying 4 bytes and write to the buffer that will be full during copy + fail("Expected IllegalStateException for buffer overflow during copy"); + } catch (IllegalStateException e) { + assertEquals("Buffer overflow: Cannot write to a full buffer", e.getMessage()); + } + } +} From 416e1fb0ef2c550226b36b5c0ee985b3b36bc6d6 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sun, 17 Aug 2025 15:08:48 +0200 Subject: [PATCH 02/22] [COMPRESS-706] Add support for reading LHA archive format --- .../archivers/ArchiveStreamFactory.java | 30 +- .../archivers/ArchiveStreamProvider.java | 1 + .../commons/compress/archivers/lha/CRC16.java | 94 ++ .../archivers/lha/LhaArchiveEntry.java | 216 +++ .../archivers/lha/LhaArchiveInputStream.java | 701 +++++++++ .../compress/archivers/lha/package-info.java | 23 + ...tLhStaticHuffmanCompressorInputStream.java | 344 ++++ .../compress/compressors/lha/BinaryTree.java | 150 ++ .../lha/Lh4CompressorInputStream.java | 49 + .../lha/Lh5CompressorInputStream.java | 44 + .../lha/Lh6CompressorInputStream.java | 44 + .../lha/Lh7CompressorInputStream.java | 44 + .../compressors/lha/package-info.java | 23 + .../compress/utils/CircularBuffer.java | 2 +- .../archivers/ArchiveStreamFactoryTest.java | 10 +- .../compress/archivers/lha/CRC16Test.java | 36 + .../archivers/lha/LhaArchiveEntryTest.java | 67 + .../lha/LhaArchiveInputStreamTest.java | 1381 +++++++++++++++++ ...taticHuffmanCompressorInputStreamTest.java | 135 ++ .../compressors/lha/BinaryTreeTest.java | 244 +++ .../lha/Lh4CompressorInputStreamTest.java | 70 + .../lha/Lh5CompressorInputStreamTest.java | 69 + .../lha/Lh6CompressorInputStreamTest.java | 69 + .../lha/Lh7CompressorInputStreamTest.java | 69 + .../compress/utils/CircularBufferTest.java | 22 +- src/test/resources/bla.lha | Bin 0 -> 55 
bytes src/test/resources/test-amiga-l0-lh4.lha | Bin 0 -> 41623 bytes src/test/resources/test-amiga-l2.lha | Bin 0 -> 145 bytes src/test/resources/test-macos-l0-lh5-rle.lha | Bin 0 -> 55 bytes src/test/resources/test-macos-l0-lh5.lha | Bin 0 -> 40051 bytes src/test/resources/test-macos-l0-lh6.lha | Bin 0 -> 38089 bytes src/test/resources/test-macos-l0-lh7.lha | Bin 0 -> 37453 bytes src/test/resources/test-macos-l0.lha | Bin 0 -> 278 bytes src/test/resources/test-macos-l1.lha | Bin 0 -> 343 bytes src/test/resources/test-macos-l2.lha | Bin 0 -> 343 bytes src/test/resources/test-msdos-l0.lha | Bin 0 -> 220 bytes src/test/resources/test-msdos-l1.lha | Bin 0 -> 290 bytes src/test/resources/test-msdos-l2-attrib.lha | Bin 0 -> 63 bytes src/test/resources/test-msdos-l2.lha | Bin 0 -> 300 bytes 39 files changed, 3923 insertions(+), 14 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress/archivers/lha/CRC16.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/lha/package-info.java create mode 100644 src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java create mode 100644 src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java create mode 100644 src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java create mode 100644 src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java create mode 100644 src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java create mode 100644 src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java create mode 100644 src/main/java/org/apache/commons/compress/compressors/lha/package-info.java create mode 100644 
src/test/java/org/apache/commons/compress/archivers/lha/CRC16Test.java create mode 100644 src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java create mode 100644 src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java create mode 100644 src/test/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStreamTest.java create mode 100644 src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java create mode 100644 src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java create mode 100644 src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java create mode 100644 src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java create mode 100644 src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java create mode 100644 src/test/resources/bla.lha create mode 100644 src/test/resources/test-amiga-l0-lh4.lha create mode 100644 src/test/resources/test-amiga-l2.lha create mode 100644 src/test/resources/test-macos-l0-lh5-rle.lha create mode 100644 src/test/resources/test-macos-l0-lh5.lha create mode 100644 src/test/resources/test-macos-l0-lh6.lha create mode 100644 src/test/resources/test-macos-l0-lh7.lha create mode 100644 src/test/resources/test-macos-l0.lha create mode 100644 src/test/resources/test-macos-l1.lha create mode 100644 src/test/resources/test-macos-l2.lha create mode 100644 src/test/resources/test-msdos-l0.lha create mode 100644 src/test/resources/test-msdos-l1.lha create mode 100644 src/test/resources/test-msdos-l2-attrib.lha create mode 100644 src/test/resources/test-msdos-l2.lha diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index 2c33d769d91..4ec0eed61cb 100644 --- 
a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -37,6 +37,7 @@ import org.apache.commons.compress.archivers.dump.DumpArchiveInputStream; import org.apache.commons.compress.archivers.jar.JarArchiveInputStream; import org.apache.commons.compress.archivers.jar.JarArchiveOutputStream; +import org.apache.commons.compress.archivers.lha.LhaArchiveInputStream; import org.apache.commons.compress.archivers.sevenz.SevenZFile; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; @@ -90,6 +91,8 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider { private static final int DUMP_SIGNATURE_SIZE = 32; + private static final int LHA_SIGNATURE_SIZE = 22; + private static final int SIGNATURE_SIZE = 12; /** @@ -174,6 +177,13 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider { */ public static final String JAR = "jar"; + /** + * Constant (value {@value}) used to identify the LHA archive format. + * Not supported as an output stream type. + * @since 1.29 + */ + public static final String LHA = "lha"; + /** * Constant used to identify the TAR archive format. 
* @@ -255,6 +265,18 @@ public static String detect(final InputStream in) throws ArchiveException { if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) { return DUMP; } + // LHA needs a bigger buffer to check the signature + final byte[] lhasig = new byte[LHA_SIGNATURE_SIZE]; + in.mark(lhasig.length); + try { + signatureLength = IOUtils.readFully(in, lhasig); + in.reset(); + } catch (final IOException e) { + throw new ArchiveException("IOException while reading LHA signature", (Throwable) e); + } + if (LhaArchiveInputStream.matches(lhasig, signatureLength)) { + return LHA; + } // Tar needs an even bigger buffer to check the signature; read the first block final byte[] tarHeader = new byte[TAR_HEADER_SIZE]; in.mark(tarHeader.length); @@ -439,6 +461,12 @@ public > I createArchiveInp } return (I) arjBuilder.get(); } + if (LHA.equalsIgnoreCase(archiverName)) { + if (actualEncoding != null) { + return (I) new LhaArchiveInputStream(in, actualEncoding); + } + return (I) new LhaArchiveInputStream(in); + } if (ZIP.equalsIgnoreCase(archiverName)) { final ZipArchiveInputStream.Builder zipBuilder = ZipArchiveInputStream.builder().setInputStream(in); if (actualEncoding != null) { @@ -593,7 +621,7 @@ public String getEntryEncoding() { @Override public Set getInputStreamArchiveNames() { - return Sets.newHashSet(AR, ARJ, ZIP, TAR, JAR, CPIO, DUMP, SEVEN_Z); + return Sets.newHashSet(AR, ARJ, LHA, ZIP, TAR, JAR, CPIO, DUMP, SEVEN_Z); } @Override diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java index 7c9d93888b8..4eb596eb10c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java @@ -36,6 +36,7 @@ public interface ArchiveStreamProvider { * @param The {@link ArchiveInputStream} type. * @param archiverName the archiver name, i.e. 
{@value org.apache.commons.compress.archivers.ArchiveStreamFactory#AR}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ARJ}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#LHA}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ZIP}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#TAR}, * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#JAR}, diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/CRC16.java b/src/main/java/org/apache/commons/compress/archivers/lha/CRC16.java new file mode 100644 index 00000000000..bcee1d25b5d --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/lha/CRC16.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.lha; + +import java.util.zip.Checksum; + +/** + * CRC-16 checksum implementation based on polynomial x^16 + x^15 + x^2 + 1 (0x8005) and + * the initial value 0x0000. This CRC variant is also known as CRC-16-MODBUS, CRC-16-IBM + * and CRC-16-ANSI. 
+ */ +class CRC16 implements Checksum { + private static final int INITIAL_VALUE = 0x0000; + + private static final int[] CRC16_TABLE = { + 0x0000, 0xC0C1, 0xC181, 0x0140, 0xC301, 0x03C0, 0x0280, 0xC241, + 0xC601, 0x06C0, 0x0780, 0xC741, 0x0500, 0xC5C1, 0xC481, 0x0440, + 0xCC01, 0x0CC0, 0x0D80, 0xCD41, 0x0F00, 0xCFC1, 0xCE81, 0x0E40, + 0x0A00, 0xCAC1, 0xCB81, 0x0B40, 0xC901, 0x09C0, 0x0880, 0xC841, + 0xD801, 0x18C0, 0x1980, 0xD941, 0x1B00, 0xDBC1, 0xDA81, 0x1A40, + 0x1E00, 0xDEC1, 0xDF81, 0x1F40, 0xDD01, 0x1DC0, 0x1C80, 0xDC41, + 0x1400, 0xD4C1, 0xD581, 0x1540, 0xD701, 0x17C0, 0x1680, 0xD641, + 0xD201, 0x12C0, 0x1380, 0xD341, 0x1100, 0xD1C1, 0xD081, 0x1040, + 0xF001, 0x30C0, 0x3180, 0xF141, 0x3300, 0xF3C1, 0xF281, 0x3240, + 0x3600, 0xF6C1, 0xF781, 0x3740, 0xF501, 0x35C0, 0x3480, 0xF441, + 0x3C00, 0xFCC1, 0xFD81, 0x3D40, 0xFF01, 0x3FC0, 0x3E80, 0xFE41, + 0xFA01, 0x3AC0, 0x3B80, 0xFB41, 0x3900, 0xF9C1, 0xF881, 0x3840, + 0x2800, 0xE8C1, 0xE981, 0x2940, 0xEB01, 0x2BC0, 0x2A80, 0xEA41, + 0xEE01, 0x2EC0, 0x2F80, 0xEF41, 0x2D00, 0xEDC1, 0xEC81, 0x2C40, + 0xE401, 0x24C0, 0x2580, 0xE541, 0x2700, 0xE7C1, 0xE681, 0x2640, + 0x2200, 0xE2C1, 0xE381, 0x2340, 0xE101, 0x21C0, 0x2080, 0xE041, + 0xA001, 0x60C0, 0x6180, 0xA141, 0x6300, 0xA3C1, 0xA281, 0x6240, + 0x6600, 0xA6C1, 0xA781, 0x6740, 0xA501, 0x65C0, 0x6480, 0xA441, + 0x6C00, 0xACC1, 0xAD81, 0x6D40, 0xAF01, 0x6FC0, 0x6E80, 0xAE41, + 0xAA01, 0x6AC0, 0x6B80, 0xAB41, 0x6900, 0xA9C1, 0xA881, 0x6840, + 0x7800, 0xB8C1, 0xB981, 0x7940, 0xBB01, 0x7BC0, 0x7A80, 0xBA41, + 0xBE01, 0x7EC0, 0x7F80, 0xBF41, 0x7D00, 0xBDC1, 0xBC81, 0x7C40, + 0xB401, 0x74C0, 0x7580, 0xB541, 0x7700, 0xB7C1, 0xB681, 0x7640, + 0x7200, 0xB2C1, 0xB381, 0x7340, 0xB101, 0x71C0, 0x7080, 0xB041, + 0x5000, 0x90C1, 0x9181, 0x5140, 0x9301, 0x53C0, 0x5280, 0x9241, + 0x9601, 0x56C0, 0x5780, 0x9741, 0x5500, 0x95C1, 0x9481, 0x5440, + 0x9C01, 0x5CC0, 0x5D80, 0x9D41, 0x5F00, 0x9FC1, 0x9E81, 0x5E40, + 0x5A00, 0x9AC1, 0x9B81, 0x5B40, 0x9901, 0x59C0, 0x5880, 0x9841, + 0x8801, 
0x48C0, 0x4980, 0x8941, 0x4B00, 0x8BC1, 0x8A81, 0x4A40, + 0x4E00, 0x8EC1, 0x8F81, 0x4F40, 0x8D01, 0x4DC0, 0x4C80, 0x8C41, + 0x4400, 0x84C1, 0x8581, 0x4540, 0x8701, 0x47C0, 0x4680, 0x8641, + 0x8201, 0x42C0, 0x4380, 0x8341, 0x4100, 0x81C1, 0x8081, 0x4040 + }; + + private int crc; + + CRC16() { + reset(); + } + + @Override + public long getValue() { + return this.crc; + } + + @Override + public void reset() { + this.crc = INITIAL_VALUE; + } + + @Override + public void update(final int b) { + this.crc = (this.crc >>> 8) ^ CRC16_TABLE[(this.crc ^ b) & 0xff]; + } + + @Override + public void update(final byte[] b, final int off, final int len) { + for (int i = 0; i < len; i++) { + update(b[off + i]); + } + } +} diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java new file mode 100644 index 00000000000..1d158e24fed --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java @@ -0,0 +1,216 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.archivers.lha; + +import java.time.ZoneOffset; +import java.util.Date; +import java.util.Optional; + +import org.apache.commons.compress.archivers.ArchiveEntry; + +/** + * Represents an entry in a LHA archive. + * + * @since 1.29 + */ +public class LhaArchiveEntry implements ArchiveEntry { + private String name; + private boolean directory; + private long size; + private Date lastModifiedDate; + private long compressedSize; + private String compressionMethod; + private int crcValue; + private Optional osId = Optional.empty(); + private Optional unixPermissionMode = Optional.empty(); + private Optional unixUserId = Optional.empty(); + private Optional unixGroupId = Optional.empty(); + private Optional msdosFileAttributes = Optional.empty(); + private Optional headerCrc = Optional.empty(); + + public LhaArchiveEntry() { + } + + @Override + public String toString() { + final StringBuffer sb = new StringBuffer().append("LhaArchiveEntry[") + .append("name=").append(name) + .append(",directory=").append(directory) + .append(",size=").append(size) + .append(",lastModifiedDate=").append(lastModifiedDate == null ? 
"" : lastModifiedDate.toInstant().atZone(ZoneOffset.UTC).toString()) + .append(",compressedSize=").append(compressedSize) + .append(",compressionMethod=").append(compressionMethod) + .append(",crcValue=").append(String.format("0x%04x", crcValue)); + + if (osId.isPresent()) { + sb.append(",osId=").append(osId.get()); + } + + if (unixPermissionMode.isPresent()) { + sb.append(",unixPermissionMode=").append(String.format("%03o", unixPermissionMode.get())); + } + + if (msdosFileAttributes.isPresent()) { + sb.append(",msdosFileAttributes=").append(String.format("%04x", msdosFileAttributes.get())); + } + + if (headerCrc.isPresent()) { + sb.append(",headerCrc=").append(String.format("0x%04x", headerCrc.get())); + } + + return sb.append("]").toString(); + } + + @Override + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public long getSize() { + return size; + } + + public void setSize(long size) { + this.size = size; + } + + @Override + public Date getLastModifiedDate() { + return lastModifiedDate; + } + + public void setLastModifiedDate(Date lastModifiedDate) { + this.lastModifiedDate = lastModifiedDate; + } + + /** + * Returns the compressed size of this entry. + * + * @return the compressed size + */ + public long getCompressedSize() { + return compressedSize; + } + + public void setCompressedSize(long compressedSize) { + this.compressedSize = compressedSize; + } + + public void setDirectory(boolean directory) { + this.directory = directory; + } + + @Override + public boolean isDirectory() { + return directory; + } + + /** + * Returns the compression method of this entry. + * + * @return the compression method + */ + public String getCompressionMethod() { + return compressionMethod; + } + + public void setCompressionMethod(String compressionMethod) { + this.compressionMethod = compressionMethod; + } + + /** + * Returns the CRC-16 checksum of the uncompressed data of this entry. 
+ * + * @return CRC-16 checksum of the uncompressed data + */ + public int getCrcValue() { + return crcValue; + } + + public void setCrcValue(int crc) { + this.crcValue = crc; + } + + /** + * Returns the operating system id if available for this entry. + * + * @return operating system id if available + */ + public Optional getOsId() { + return osId; + } + + public void setOsId(Optional osId) { + this.osId = osId; + } + + public Optional getUnixPermissionMode() { + return unixPermissionMode; + } + + public void setUnixPermissionMode(Optional unixPermissionMode) { + this.unixPermissionMode = unixPermissionMode; + } + + public Optional getUnixUserId() { + return unixUserId; + } + + public void setUnixUserId(Optional unixUserId) { + this.unixUserId = unixUserId; + } + + public Optional getUnixGroupId() { + return unixGroupId; + } + + public void setUnixGroupId(Optional unixGroupId) { + this.unixGroupId = unixGroupId; + } + + /** + * Returns the MS-DOS file attributes if available for this entry. + * + * @return MS-DOS file attributes if available + */ + public Optional getMsdosFileAttributes() { + return msdosFileAttributes; + } + + public void setMsdosFileAttributes(Optional msdosFileAttributes) { + this.msdosFileAttributes = msdosFileAttributes; + } + + /** + * Don't expose the header CRC publicly, as it is of no interest to most users. + */ + Optional getHeaderCrc() { + return headerCrc; + } + + void setHeaderCrc(Optional headerCrc) { + this.headerCrc = headerCrc; + } +} diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java new file mode 100644 index 00000000000..cd79abd9c35 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -0,0 +1,701 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.lha; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Optional; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveException; +import org.apache.commons.compress.archivers.ArchiveInputStream; +import org.apache.commons.compress.archivers.zip.ZipUtil; +import org.apache.commons.compress.compressors.lha.Lh4CompressorInputStream; +import org.apache.commons.compress.compressors.lha.Lh5CompressorInputStream; +import org.apache.commons.compress.compressors.lha.Lh6CompressorInputStream; +import org.apache.commons.compress.compressors.lha.Lh7CompressorInputStream; +import org.apache.commons.io.IOUtils; +import org.apache.commons.io.input.BoundedInputStream; +import org.apache.commons.io.input.ChecksumInputStream; + +/** + * Implements the LHA archive format as an InputStream. 
+ * + * This implementation is based on the documentation that can be found at + * http://dangan.g.dgdg.jp/en/Content/Program/Java/jLHA/Notes/Notes.html + * + * @NotThreadSafe + * @since 1.29 + */ +public class LhaArchiveInputStream extends ArchiveInputStream { + // Fields that are the same across all header levels + private static final int HEADER_GENERIC_MINIMUM_HEADER_LENGTH = 22; + private static final int HEADER_GENERIC_OFFSET_COMPRESSION_METHOD = 2; + private static final int HEADER_GENERIC_OFFSET_HEADER_LEVEL = 20; + + // Header Level 0 + private static final int HEADER_LEVEL_0_OFFSET_HEADER_SIZE = 0; + private static final int HEADER_LEVEL_0_OFFSET_HEADER_CHECKSUM = 1; + private static final int HEADER_LEVEL_0_OFFSET_COMPRESSED_SIZE = 7; + private static final int HEADER_LEVEL_0_OFFSET_ORIGINAL_SIZE = 11; + private static final int HEADER_LEVEL_0_OFFSET_LAST_MODIFIED_DATE_TIME = 15; + private static final int HEADER_LEVEL_0_OFFSET_FILENAME_LENGTH = 21; + private static final int HEADER_LEVEL_0_OFFSET_FILENAME = 22; + + // Header Level 1 + private static final int HEADER_LEVEL_1_OFFSET_BASE_HEADER_SIZE = 0; + private static final int HEADER_LEVEL_1_OFFSET_BASE_HEADER_CHECKSUM = 1; + private static final int HEADER_LEVEL_1_OFFSET_SKIP_SIZE = 7; + private static final int HEADER_LEVEL_1_OFFSET_ORIGINAL_SIZE = 11; + private static final int HEADER_LEVEL_1_OFFSET_LAST_MODIFIED_DATE_TIME = 15; + private static final int HEADER_LEVEL_1_OFFSET_FILENAME_LENGTH = 21; + private static final int HEADER_LEVEL_1_OFFSET_FILENAME = 22; + + // Header Level 2 + private static final int HEADER_LEVEL_2_OFFSET_HEADER_SIZE = 0; + private static final int HEADER_LEVEL_2_OFFSET_COMPRESSED_SIZE = 7; + private static final int HEADER_LEVEL_2_OFFSET_ORIGINAL_SIZE = 11; + private static final int HEADER_LEVEL_2_OFFSET_LAST_MODIFIED_DATE_TIME = 15; + private static final int HEADER_LEVEL_2_OFFSET_CRC = 21; + private static final int HEADER_LEVEL_2_OFFSET_OS_ID = 23; + private static 
final int HEADER_LEVEL_2_OFFSET_FIRST_EXTENDED_HEADER_SIZE = 24; + + // Extended header types + private static final int EXTENDED_HEADER_TYPE_COMMON = 0x00; + private static final int EXTENDED_HEADER_TYPE_FILENAME = 0x01; + private static final int EXTENDED_HEADER_TYPE_DIRECTORY_NAME = 0x02; + + private static final int EXTENDED_HEADER_TYPE_MSDOS_FILE_ATTRIBUTES = 0x40; + + private static final int EXTENDED_HEADER_TYPE_UNIX_PERMISSION = 0x50; + private static final int EXTENDED_HEADER_TYPE_UNIX_UID_GID = 0x51; + private static final int EXTENDED_HEADER_TYPE_UNIX_TIMESTAMP = 0x54; + + // Compression methods + private static final String COMPRESSION_METHOD_DIRECTORY = "-lhd-"; // Directory entry + private static final String COMPRESSION_METHOD_LH0 = "-lh0-"; + private static final String COMPRESSION_METHOD_LH4 = "-lh4-"; + private static final String COMPRESSION_METHOD_LH5 = "-lh5-"; + private static final String COMPRESSION_METHOD_LH6 = "-lh6-"; + private static final String COMPRESSION_METHOD_LH7 = "-lh7-"; + private static final String COMPRESSION_METHOD_LZ4 = "-lz4-"; + + private final char fileSeparatorChar; + private LhaArchiveEntry currentEntry; + private InputStream currentCompressedStream; + private InputStream currentDecompressedStream; + + /** + * Constructs the LhaArchiveInputStream, taking ownership of the inputStream that is passed in. + * + * @param inputStream the underlying stream, whose ownership is taken + */ + public LhaArchiveInputStream(final InputStream inputStream) { + this(inputStream, null); + } + + /** + * Constructs the LhaArchiveInputStream, taking ownership of the inputStream that is passed in. + * + * @param inputStream the underlying stream, whose ownership is taken + * @param charsetName the charset used for file names in the archive. May be {@code null} to use US-ASCII as default. 
+ */ + public LhaArchiveInputStream(final InputStream inputStream, final String charsetName) { + this(inputStream, charsetName, File.separatorChar); + } + + /** + * Constructs the LhaArchiveInputStream, taking ownership of the inputStream that is passed in. + * + * @param inputStream the underlying stream, whose ownership is taken + * @param charsetName the charset used for file names in the archive. May be {@code null} to use US-ASCII as default. + * @param fileSeparatorChar the character used to separate file path elements + */ + public LhaArchiveInputStream(final InputStream inputStream, final String charsetName, final char fileSeparatorChar) { + super(inputStream, charsetName == null ? StandardCharsets.US_ASCII.name() : charsetName); + this.fileSeparatorChar = fileSeparatorChar; + } + + @Override + public boolean canReadEntryData(final ArchiveEntry archiveEntry) { + return currentDecompressedStream != null; + } + + @Override + public int read(final byte[] buffer, final int offset, final int length) throws IOException { + if (currentEntry == null) { + throw new IllegalStateException("No current entry"); + } + + if (currentDecompressedStream == null) { + throw new ArchiveException("Unsupported compression method: %s", currentEntry.getCompressionMethod()); + } + + return currentDecompressedStream.read(buffer, offset, length); + } + + /** + * Checks if the signature matches what is expected for an LHA file. There is no specific + * signature for LHA files, so this method checks if the header level and the compression + * method are valid for an LHA archive. The signature must be at least the minimum header + * length of 22 bytes for this check to work properly. 
+ * + * @param signature the bytes to check + * @param length the number of bytes to check + * @return true, if this stream is an LHA archive stream, false otherwise + */ + public static boolean matches(final byte[] signature, final int length) { + if (signature.length < HEADER_GENERIC_MINIMUM_HEADER_LENGTH || length < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { + return false; + } + + final ByteBuffer header = ByteBuffer.wrap(signature).order(ByteOrder.LITTLE_ENDIAN); + + // Determine header level. Expected value is in the range 0-3. + final byte headerLevel = header.get(HEADER_GENERIC_OFFSET_HEADER_LEVEL); + if (headerLevel < 0 || headerLevel > 3) { + return false; + } + + // Check if the compression method is valid for LHA archives + try { + getCompressionMethod(header); + } catch (ArchiveException e) { + return false; + } + + return true; + } + + @Override + public LhaArchiveEntry getNextEntry() throws IOException { + if (this.currentCompressedStream != null) { + // Consume the entire compressed stream to end up at the next entry + IOUtils.consume(this.currentCompressedStream); + + this.currentCompressedStream = null; + this.currentDecompressedStream = null; + } + + this.currentEntry = readHeader(); + + return this.currentEntry; + } + + /** + * Read the next LHA header from the input stream. + * + * @return the next header entry, or null if there are no more entries + * @throws IOException + */ + protected LhaArchiveEntry readHeader() throws IOException { + // Header level is not known yet. Read the minimum length header. 
+ final byte[] buffer = new byte[HEADER_GENERIC_MINIMUM_HEADER_LENGTH]; + final int len = in.read(buffer); + if (len == -1) { + // EOF + return null; + } else if (len == 1 && buffer[0] == 0) { + // Last byte of the file is zero indicating no more entries + return null; + } else if (len < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { + throw new ArchiveException("Invalid header length"); + } + + final ByteBuffer header = ByteBuffer.wrap(buffer).order(ByteOrder.LITTLE_ENDIAN); + + // Determine header level + final byte headerLevel = header.get(HEADER_GENERIC_OFFSET_HEADER_LEVEL); + if (headerLevel == 0) { + return readHeaderLevel0(header); + } else if (headerLevel == 1) { + return readHeaderLevel1(header); + } else if (headerLevel == 2) { + return readHeaderLevel2(header); + } else { + throw new ArchiveException("Invalid header level: %d", headerLevel); + } + } + + /** + * Read LHA header level 0. + * + * @param buffer the buffer containing the header data + * @return the LhaArchiveEntry read from the buffer + * @throws IOException + */ + protected LhaArchiveEntry readHeaderLevel0(ByteBuffer buffer) throws IOException { + final int headerSize = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_HEADER_SIZE)); + if (headerSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { + throw new ArchiveException("Invalid header level 0 length: %d", headerSize); + } + + buffer = readRemainingHeaderData(buffer, headerSize + 2); // Header size is not including the first two bytes of the header + + final int headerChecksum = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_HEADER_CHECKSUM)); + + final LhaArchiveEntry entry = new LhaArchiveEntry(); + entry.setCompressionMethod(getCompressionMethod(buffer)); + entry.setCompressedSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_COMPRESSED_SIZE))); + entry.setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_ORIGINAL_SIZE))); + entry.setLastModifiedDate(new 
Date(ZipUtil.dosToJavaTime(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_LAST_MODIFIED_DATE_TIME))))); + + final int filenameLength = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_FILENAME_LENGTH)); + buffer.position(HEADER_LEVEL_0_OFFSET_FILENAME); + entry.setName(getPathname(buffer, filenameLength)); + + entry.setDirectory(isDirectory(entry.getCompressionMethod())); + + entry.setCrcValue(Short.toUnsignedInt(buffer.getShort())); + + if (calculateHeaderChecksum(buffer) != headerChecksum) { + throw new ArchiveException("Invalid header level 0 checksum"); + } + + prepareDecompression(entry); + + return entry; + } + + /** + * Read LHA header level 1. + * + * @param buffer the buffer containing the header data + * @return the LhaArchiveEntry read from the buffer + * @throws IOException + */ + protected LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException { + final int baseHeaderSize = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_BASE_HEADER_SIZE)); + if (baseHeaderSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { + throw new ArchiveException("Invalid header level 1 length: %d", baseHeaderSize); + } + + buffer = readRemainingHeaderData(buffer, baseHeaderSize + 2); + + final int baseHeaderChecksum = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_BASE_HEADER_CHECKSUM)); + + final LhaArchiveEntry entry = new LhaArchiveEntry(); + entry.setCompressionMethod(getCompressionMethod(buffer)); + long skipSize = Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_SKIP_SIZE)); + entry.setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_ORIGINAL_SIZE))); + entry.setLastModifiedDate(new Date(ZipUtil.dosToJavaTime(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_LAST_MODIFIED_DATE_TIME))))); + + final int filenameLength = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_FILENAME_LENGTH)); + buffer.position(HEADER_LEVEL_1_OFFSET_FILENAME); + entry.setName(getPathname(buffer, filenameLength)); + + 
entry.setDirectory(isDirectory(entry.getCompressionMethod())); + + entry.setCrcValue(Short.toUnsignedInt(buffer.getShort())); + entry.setOsId(Optional.of(Byte.toUnsignedInt(buffer.get()))); + + if (calculateHeaderChecksum(buffer) != baseHeaderChecksum) { + throw new ArchiveException("Invalid header level 1 checksum"); + } + + // Create a list to store base header and all extended headers + // to be able to calculate the CRC of the full header + final List headerParts = new ArrayList<>(); + headerParts.add(buffer); + + int nextHeaderSize = Short.toUnsignedInt(buffer.getShort()); + while (nextHeaderSize > 0) { + final ByteBuffer extendedHeaderBuffer = readExtendedHeader(nextHeaderSize); + skipSize -= nextHeaderSize; + + parseExtendedHeader(extendedHeaderBuffer, entry); + + headerParts.add(extendedHeaderBuffer); + + nextHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); + } + + entry.setCompressedSize(skipSize); + + if (entry.getHeaderCrc().isPresent()) { + // Calculate CRC16 of full header + final long headerCrc = calculateCRC16(headerParts.toArray(new ByteBuffer[headerParts.size()])); + if (headerCrc != entry.getHeaderCrc().get()) { + throw new ArchiveException("Invalid header CRC expected=0x%04x found=0x%04x", headerCrc, entry.getHeaderCrc().get()); + } + } + + prepareDecompression(entry); + + return entry; + } + + /** + * Read LHA header level 2. 
+ * + * @param buffer the buffer containing the header data + * @return the LhaArchiveEntry read from the buffer + * @throws IOException + */ + protected LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException { + final int headerSize = Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_HEADER_SIZE)); + if (headerSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { + throw new ArchiveException("Invalid header level 2 length: %d", headerSize); + } + + buffer = readRemainingHeaderData(buffer, headerSize); + + final LhaArchiveEntry entry = new LhaArchiveEntry(); + entry.setCompressionMethod(getCompressionMethod(buffer)); + entry.setCompressedSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_COMPRESSED_SIZE))); + entry.setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_ORIGINAL_SIZE))); + entry.setLastModifiedDate(new Date(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_LAST_MODIFIED_DATE_TIME)) * 1000)); + entry.setName(""); + entry.setDirectory(isDirectory(entry.getCompressionMethod())); + entry.setCrcValue(Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_CRC))); + entry.setOsId(Optional.of(Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_2_OFFSET_OS_ID)))); + + int extendedHeaderOffset = HEADER_LEVEL_2_OFFSET_FIRST_EXTENDED_HEADER_SIZE; + int nextHeaderSize = Short.toUnsignedInt(buffer.getShort(extendedHeaderOffset)); + while (nextHeaderSize > 0) { + // Create new ByteBuffer as a slice from the full header. Set limit to the extended header length. 
+ final ByteBuffer extendedHeaderBuffer = byteBufferSlice(buffer, extendedHeaderOffset + 2, nextHeaderSize).order(ByteOrder.LITTLE_ENDIAN); + + extendedHeaderOffset += nextHeaderSize; + + parseExtendedHeader(extendedHeaderBuffer, entry); + + nextHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); + } + + if (entry.getHeaderCrc().isPresent()) { + // Calculate CRC16 of full header + final long headerCrc = calculateCRC16(buffer); + if (headerCrc != entry.getHeaderCrc().get()) { + throw new ArchiveException("Invalid header CRC expected=0x%04x found=0x%04x", headerCrc, entry.getHeaderCrc().get()); + } + } + + prepareDecompression(entry); + + return entry; + } + + /** + * Get the compression method from the header. It is always located at the same offset for all header levels. + * + * @param buffer the buffer containing the header data + * @return compression method, e.g. -lh5- + * @throws ArchiveException if the compression method is invalid + */ + protected static String getCompressionMethod(final ByteBuffer buffer) throws ArchiveException { + final byte[] compressionMethodBuffer = new byte[5]; + byteBufferGet(buffer, HEADER_GENERIC_OFFSET_COMPRESSION_METHOD, compressionMethodBuffer); + + // Validate the compression method + if (compressionMethodBuffer[0] == '-' && + Character.isLowerCase(compressionMethodBuffer[1]) && + Character.isLowerCase(compressionMethodBuffer[2]) && + (Character.isLowerCase(compressionMethodBuffer[3]) || Character.isDigit(compressionMethodBuffer[3])) && + compressionMethodBuffer[4] == '-') { + return new String(compressionMethodBuffer, StandardCharsets.US_ASCII); + } else { + throw new ArchiveException("Invalid compression method: 0x%02x 0x%02x 0x%02x 0x%02x 0x%02x", + compressionMethodBuffer[0], + compressionMethodBuffer[1], + compressionMethodBuffer[2], + compressionMethodBuffer[3], + compressionMethodBuffer[4]); + } + } + + /** + * Get the pathname from the current position in the provided 
buffer. Any 0xFF bytes + * and '\' chars will be converted into the configured file path separator char. + * Any leading file path separator char will be removed to avoid extracting to + * absolute locations. + * + * @param buffer the buffer where to get the pathname from + * @param pathnameLength the length of the pathname + * @return pathname + */ + protected String getPathname(final ByteBuffer buffer, final int pathnameLength) { + final byte[] pathnameBuffer = new byte[pathnameLength]; + buffer.get(pathnameBuffer); + + // Split the pathname into parts by 0xFF bytes + final StringBuilder pathnameStringBuilder = new StringBuilder(); + int start = 0; + for (int i = 0; i < pathnameLength; i++) { + if (pathnameBuffer[i] == (byte) 0xFF) { + if (i > start) { + // Decode the path segment into a string using the specified charset and append it to the result + pathnameStringBuilder.append(new String(pathnameBuffer, start, i - start, getCharset())).append(fileSeparatorChar); + } + + start = i + 1; // Move start to the next segment + } + } + + // Append the last segment if it exists + if (start < pathnameLength) { + pathnameStringBuilder.append(new String(pathnameBuffer, start, pathnameLength - start, getCharset())); + } + + String pathname = pathnameStringBuilder.toString(); + + // If the path separator char is not '\', replace all '\' characters with the path separator char + if (fileSeparatorChar != '\\') { + pathname = pathname.replace('\\', fileSeparatorChar); + } + + // Remove leading file separator chars to avoid extracting to absolute locations + while (pathname.length() > 0 && pathname.charAt(0) == fileSeparatorChar) { + pathname = pathname.substring(1); + } + + return pathname; + } + + /** + * Read the remaining part of the header and append it to the already loaded parts. 
+ * + * @param currentHeader all header parts that have already been loaded into memory + * @param headerSize total header size + * @return header the complete header as a ByteBuffer + * @throws IOException + */ + private ByteBuffer readRemainingHeaderData(final ByteBuffer currentHeader, final int headerSize) throws IOException { + final byte[] remainingData = new byte[headerSize - currentHeader.capacity()]; + final int len = in.read(remainingData); + if (len != remainingData.length) { + throw new ArchiveException("Error reading remaining header"); + } + + return ByteBuffer.allocate(currentHeader.capacity() + len).put(currentHeader.array()).put(remainingData).order(ByteOrder.LITTLE_ENDIAN); + } + + /** + * Read extended header from the input stream. + * + * @param headerSize the size of the extended header to read + * @return the extended header as a ByteBuffer + * @throws IOException + */ + private ByteBuffer readExtendedHeader(final int headerSize) throws IOException { + final byte[] extensionHeader = new byte[headerSize]; + final int len = in.read(extensionHeader); + if (len != extensionHeader.length) { + throw new ArchiveException("Error reading extended header"); + } + + return ByteBuffer.wrap(extensionHeader).order(ByteOrder.LITTLE_ENDIAN); + } + + /** + * Parse the extended header and set the values in the provided entry. 
+ * + * @param extendedHeaderBuffer the buffer containing the extended header + * @param entry the entry to set the values in + * @throws IOException + */ + protected void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final LhaArchiveEntry entry) throws IOException { + final int extendedHeaderType = Byte.toUnsignedInt(extendedHeaderBuffer.get()); + if (extendedHeaderType == EXTENDED_HEADER_TYPE_COMMON) { + // Common header + final int crcPos = extendedHeaderBuffer.position(); // Save the current position to be able to set the header CRC later + + // Header CRC + entry.setHeaderCrc(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + + // Set header CRC to zero to be able to later compute the CRC of the full header + extendedHeaderBuffer.putShort(crcPos, (short) 0); + } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_FILENAME) { + // File name header + final int filenameLength = extendedHeaderBuffer.limit() - extendedHeaderBuffer.position() - 2; + final String filename = getPathname(extendedHeaderBuffer, filenameLength); + if (entry.getName() == null || "".equals(entry.getName())) { + entry.setName(filename); + } else { + final StringBuilder entryNameBuilder = new StringBuilder(entry.getName()); + if (entryNameBuilder.charAt(entryNameBuilder.length() - 1) != fileSeparatorChar) { + // If the entry name does not end with a file separator, append it + entryNameBuilder.append(fileSeparatorChar); + } + + entryNameBuilder.append(filename); + + entry.setName(entryNameBuilder.toString()); + } + } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_DIRECTORY_NAME) { + // Directory name header + final int directoryNameLength = extendedHeaderBuffer.limit() - extendedHeaderBuffer.position() - 2; + final String directoryName = getPathname(extendedHeaderBuffer, directoryNameLength); + if (entry.getName() == null || "".equals(entry.getName())) { + entry.setName(directoryName); + } else { + final StringBuilder entryNameBuilder = new 
StringBuilder(directoryName); + if (entryNameBuilder.charAt(entryNameBuilder.length() - 1) != fileSeparatorChar) { + // If the directory name does not end with a file separator, append it + entryNameBuilder.append(fileSeparatorChar); + } + + entryNameBuilder.append(entry.getName()); + + entry.setName(entryNameBuilder.toString()); + } + } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_MSDOS_FILE_ATTRIBUTES) { + // MS-DOS file attributes + entry.setMsdosFileAttributes(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_PERMISSION) { + // UNIX file permission + entry.setUnixPermissionMode(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_UID_GID) { + // UNIX group/user ID + entry.setUnixGroupId(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + entry.setUnixUserId(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_TIMESTAMP) { + // UNIX last modified time + entry.setLastModifiedDate(new Date(Integer.toUnsignedLong(extendedHeaderBuffer.getInt()) * 1000)); + } + + // Ignore unknown extended header + } + + /** + * Check if the compression method is a directory entry. + * + * @param compressionMethod the compression method + * @return true if the compression method is a directory entry, false otherwise + */ + private boolean isDirectory(final String compressionMethod) { + return COMPRESSION_METHOD_DIRECTORY.equals(compressionMethod); + } + + /** + * Calculate the header sum for level 0 and 1 headers. The checksum is calculated by summing the + * value of all bytes in the header except for the first two bytes (header length and header checksum) + * and get the low 8 bits. 
+ * + * @param buffer the buffer containing the header + * @return checksum + */ + private int calculateHeaderChecksum(final ByteBuffer buffer) { + int sum = 0; + for (int i = 2; i < buffer.limit(); i++) { + sum += Byte.toUnsignedInt(buffer.get(i)); + } + + return sum & 0xff; + } + + /** + * Calculate the CRC16 checksum of the provided buffers. + * + * @param buffers the buffers to calculate the CRC16 checksum for + * @return CRC16 checksum + */ + private long calculateCRC16(final ByteBuffer... buffers) { + final CRC16 crc = new CRC16(); + for (ByteBuffer buffer : buffers) { + crc.update(buffer.array(), 0, buffer.limit()); + } + + return crc.getValue(); + } + + private void prepareDecompression(final LhaArchiveEntry entry) throws IOException { + // Make sure we never read more than the compressed size of the entry + this.currentCompressedStream = BoundedInputStream.builder() + .setInputStream(in) + .setMaxCount(entry.getCompressedSize()) + .get(); + + if (isDirectory(entry.getCompressionMethod())) { + // Directory entry + this.currentDecompressedStream = new ByteArrayInputStream(new byte [0]); + } else if (COMPRESSION_METHOD_LH0.equals(entry.getCompressionMethod()) || COMPRESSION_METHOD_LZ4.equals(entry.getCompressionMethod())) { + // No compression + this.currentDecompressedStream = ChecksumInputStream.builder() + .setChecksum(new CRC16()) + .setExpectedChecksumValue(entry.getCrcValue()) + .setInputStream(this.currentCompressedStream) + .get(); + } else if (COMPRESSION_METHOD_LH4.equals(entry.getCompressionMethod())) { + this.currentDecompressedStream = ChecksumInputStream.builder() + .setChecksum(new CRC16()) + .setExpectedChecksumValue(entry.getCrcValue()) + .setInputStream(new Lh4CompressorInputStream(this.currentCompressedStream)) + .get(); + } else if (COMPRESSION_METHOD_LH5.equals(entry.getCompressionMethod())) { + this.currentDecompressedStream = ChecksumInputStream.builder() + .setChecksum(new CRC16()) + .setExpectedChecksumValue(entry.getCrcValue()) + 
.setInputStream(new Lh5CompressorInputStream(this.currentCompressedStream)) + .get(); + } else if (COMPRESSION_METHOD_LH6.equals(entry.getCompressionMethod())) { + this.currentDecompressedStream = ChecksumInputStream.builder() + .setChecksum(new CRC16()) + .setExpectedChecksumValue(entry.getCrcValue()) + .setInputStream(new Lh6CompressorInputStream(this.currentCompressedStream)) + .get(); + } else if (COMPRESSION_METHOD_LH7.equals(entry.getCompressionMethod())) { + this.currentDecompressedStream = ChecksumInputStream.builder() + .setChecksum(new CRC16()) + .setExpectedChecksumValue(entry.getCrcValue()) + .setInputStream(new Lh7CompressorInputStream(this.currentCompressedStream)) + .get(); + } else { + // Unsupported compression + this.currentDecompressedStream = null; + } + } + + /** + * Create a new ByteBuffer slice from the provided buffer at the specified position and length. This is needed until this + * repo has been updated to use Java 9+ where we can use buffer.position(position).slice().limit(length) directly. + * + * @param buffer the buffer to slice from + * @param position the position in the buffer to start slicing from + * @param length the length of the slice + * @return a new ByteBuffer slice with the specified position and length + */ + private ByteBuffer byteBufferSlice(final ByteBuffer buffer, final int position, final int length) { + return ByteBuffer.wrap(buffer.array(), position, length); + } + + /** + * Get a byte array from the ByteBuffer at the specified position and length. + * This is needed until this repo has been updated to use Java 9+ where we + * can use buffer.get(position, dst) directly. 
+ * + * @param buffer the buffer to get the byte array from + * @param position the position in the buffer to start reading from + * @param dst the destination byte array to fill + */ + private static void byteBufferGet(final ByteBuffer buffer, final int position, final byte[] dst) { + for (int i = 0; i < dst.length; i++) { + dst[i] = buffer.get(position + i); + } + } +} diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/package-info.java b/src/main/java/org/apache/commons/compress/archivers/lha/package-info.java new file mode 100644 index 00000000000..370527b3366 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/lha/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Provides stream classes for reading archives using the LHA format, also known as the LZH format or LHarc format. 
+ */ +package org.apache.commons.compress.archivers.lha; diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java new file mode 100644 index 00000000000..8a314361327 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java @@ -0,0 +1,344 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.lha; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteOrder; + +import org.apache.commons.compress.compressors.CompressorException; +import org.apache.commons.compress.compressors.CompressorInputStream; +import org.apache.commons.compress.utils.BitInputStream; +import org.apache.commons.compress.utils.CircularBuffer; +import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.input.CloseShieldInputStream; + +/** + * This is an implementation of a static Huffman compressor input stream for LHA files that + * supports lh4, lh5, lh6 and lh7 compression methods. 
+ * + * This implementation is based on the documentation that can be found at + * https://github.com/jca02266/lha/blob/master/Hacking_of_LHa + */ +abstract class AbstractLhStaticHuffmanCompressorInputStream extends CompressorInputStream implements InputStreamStatistics { + // Constants for command tree decoding + private static final int COMMAND_DECODING_LENGTH_BITS = 5; // Number of bits used to encode the command decoding tree length + private static final int MAX_NUMBER_OF_COMMAND_DECODING_CODE_LENGTHS = 19; // Maximum number of codes in the command decoding tree + + // Constants for command tree + private static final int COMMAND_TREE_LENGTH_BITS = 9; // Number of bits used to encode the command tree length + + // Constants for code length + private static final int CODE_LENGTH_BITS = 3; // Number of bits used to encode the code length + private static final int MAX_CODE_LENGTH = 16; + + private BitInputStream bin; + private CircularBuffer buffer; + private int blockSize; + private BinaryTree commandTree; // Command is either a literal or a copy command + private BinaryTree distanceTree; // Distance is the offset to copy from the sliding dictionary + + /** + * Constructs a new CompressorInputStream which decompresses bytes read from the specified stream. + * + * @param in the InputStream from which to read compressed data + * @throws IOException if an I/O error occurs + */ + AbstractLhStaticHuffmanCompressorInputStream(final InputStream in) throws IOException { + this.bin = new BitInputStream(in == System.in ? 
CloseShieldInputStream.wrap(in) : in, ByteOrder.BIG_ENDIAN); + + // Create a sliding dictionary buffer that can hold the full dictionary size and the maximum match length + this.buffer = new CircularBuffer(getDictionarySize() + getMaxMatchLength()); + } + + @Override + public void close() throws IOException { + if (this.bin != null) { + try { + this.bin.close(); + } finally { + this.bin = null; + this.buffer = null; + this.blockSize = -1; + } + } + } + + /** + * Get the threshold for copying data from the sliding dictionary. This is the minimum + * possible number of bytes that will be part of a copy command. + * + * @return the copy threshold + */ + protected int getCopyThreshold() { + return 3; + } + + /** + * Get the number of bits used for the dictionary size. + * + * @return the number of bits used for the dictionary size + */ + protected abstract int getDictionaryBits(); + + /** + * Get the size of the dictionary. + * + * @return the size of the dictionary + */ + protected int getDictionarySize() { + return 1 << getDictionaryBits(); + } + + /** + * Get the number of bits used for the distance. + * + * @return the number of bits used for the distance + */ + protected abstract int getDistanceBits(); + + protected int getDistanceCodeSize() { + return getDictionaryBits() + 1; + } + + /** + * Get the maximum match length for the copy command. + * + * @return the maximum match length + */ + protected int getMaxMatchLength() { + return 256; + } + + /** + * Get the maximum number of commands in the command tree. + * This is 256 literals (0-255) and 254 copy lengths combinations (3-256). 
+ * + * @return the maximum number of commands + */ + protected int getMaxNumberOfCommands() { + return 256 + getMaxMatchLength() - getCopyThreshold() + 1; + } + + @Override + public long getCompressedCount() { + return bin.getBytesRead(); + } + + @Override + public int read() throws IOException { + if (!buffer.available()) { + // Nothing in the buffer, try to fill it + fillBuffer(); + } + + final int ret = buffer.get(); + count(ret < 0 ? 0 : 1); // Increment input stream statistics + return ret; + } + + /** + * Fill the sliding dictionary with more data. + * + * @throws IOException if an I/O error occurs + */ + private void fillBuffer() throws IOException { + if (this.blockSize == -1) { + // End of stream + return; + } else if (this.blockSize == 0) { + // Start to read the next block + + // Read the block size (number of commands to read) + this.blockSize = (int) bin.readBits(16); + if (this.blockSize == -1) { + // End of stream + return; + } + + final BinaryTree commandDecodingTree = readCommandDecodingTree(); + + this.commandTree = readCommandTree(commandDecodingTree); + + this.distanceTree = readDistanceTree(); + } + + this.blockSize--; + + final int command = commandTree.read(bin); + if (command < 0x100) { + // Literal command, just write the byte to the buffer + buffer.put(command); + } else { + // Copy command, read the distance and calculate the length from the command + final int distance = readDistance(); + final int length = command - 0x100 + getCopyThreshold(); + + // Copy the data from the sliding dictionary and add to the buffer + buffer.copy(distance + 1, length); + } + } + + /** + * Read the command decoding tree. The command decoding tree is used when reading the command tree + * which is then actually used to decode the commands (literals or copy commands). 
+ * + * @return the command decoding tree + * @throws IOException if an I/O error occurs + */ + protected BinaryTree readCommandDecodingTree() throws IOException { + // Number of code lengths to read + final int numCodeLengths = (int) bin.readBits(COMMAND_DECODING_LENGTH_BITS); + + if (numCodeLengths > MAX_NUMBER_OF_COMMAND_DECODING_CODE_LENGTHS) { + throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, MAX_NUMBER_OF_COMMAND_DECODING_CODE_LENGTHS); + } else if (numCodeLengths == 0) { + // If numCodeLengths is zero, we read a single code length of COMMAND_DECODING_LENGTH_BITS bits and use as root of the tree + return new BinaryTree(new int[] { (int) bin.readBits(COMMAND_DECODING_LENGTH_BITS) }); + } else { + // Read all code lengths + final int[] codeLengths = new int[numCodeLengths]; + for (int index = 0; index < numCodeLengths; index++) { + codeLengths[index] = readCodeLength(); + + if (index == 2) { + // After reading the first three code lengths, we read a 2-bit skip range + index += (int) bin.readBits(2); + } + } + + return new BinaryTree(codeLengths); + } + } + + /** + * Read code length (depth in tree). Usually 0-7 but could be higher and if so, + * count the number of following consecutive one bits and add to the length. + * + * @return code length + * @throws IOException if an I/O error occurs + */ + protected int readCodeLength() throws IOException { + int len = (int) bin.readBits(CODE_LENGTH_BITS); + if (len == 0x07) { + // Count the number of following consecutive one bits + while (bin.readBit() == 1) { + if (len == MAX_CODE_LENGTH) { + throw new CompressorException("Code length overflow"); + } + + len++; + } + } + + return len; + } + + /** + * Read the command tree which is used to decode the commands (literals or copy commands). 
+ * + * @param commandDecodingTree the Huffman tree used to decode the command lengths + * @return the command tree + * @throws IOException if an I/O error occurs + */ + protected BinaryTree readCommandTree(final BinaryTree commandDecodingTree) throws IOException { + final int numCodeLengths = (int) bin.readBits(COMMAND_TREE_LENGTH_BITS); + + if (numCodeLengths > getMaxNumberOfCommands()) { + throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, getMaxNumberOfCommands()); + } else if (numCodeLengths == 0) { + // If numCodeLengths is zero, we read a single code length of COMMAND_TREE_LENGTH_BITS bits and use as root of the tree + return new BinaryTree(new int[] { (int) bin.readBits(COMMAND_TREE_LENGTH_BITS) }); + } else { + // Read all code lengths + final int[] codeLengths = new int[numCodeLengths]; + + for (int index = 0; index < numCodeLengths;) { + final int codeOrSkipRange = commandDecodingTree.read(bin); + + if (codeOrSkipRange == 0) { + // Skip one code length + index++; + } else if (codeOrSkipRange == 1) { + // Skip a range of code lengths, read 4 bits to determine how many to skip + index += (int) bin.readBits(4) + 3; + } else if (codeOrSkipRange == 2) { + // Skip a range of code lengths, read 9 bits to determine how many to skip + index += (int) bin.readBits(9) + 20; + } else { + // Subtract 2 from the codeOrSkipRange to get the code length + codeLengths[index++] = codeOrSkipRange - 2; + } + } + + return new BinaryTree(codeLengths); + } + } + + /** + * Read the distance tree which is used to decode the distance of the copy command. 
+ * + * @return the distance tree + * @throws IOException if an I/O error occurs + */ + private BinaryTree readDistanceTree() throws IOException { + // Number of code lengths to read + final int numCodeLengths = (int) bin.readBits(getDistanceBits()); + + if (numCodeLengths > getDistanceCodeSize()) { + throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, getDistanceCodeSize()); + } else if (numCodeLengths == 0) { + // If numCodeLengths is zero, we read a single code length of getDistanceBits() bits and use as root of the tree + return new BinaryTree(new int[] { (int) bin.readBits(getDistanceBits()) }); + } else { + // Read all code lengths + final int[] codeLengths = new int[numCodeLengths]; + for (int index = 0; index < numCodeLengths; index++) { + codeLengths[index] = readCodeLength(); + } + + return new BinaryTree(codeLengths); + } + } + + /** + * Read the distance by first decoding the number of bits to read from the distance tree + * and then reading the actual distance value from the bit input stream. + * + * @return the distance + * @throws IOException if an I/O error occurs + */ + private int readDistance() throws IOException { + // Determine the number of bits to read for the distance by reading an entry from the distance tree + final int bits = distanceTree.read(bin); + + if (bits == 0 || bits == 1) { + // This is effectively run length encoding + return bits; + } else { + // Bits minus one is the number of bits to read for the distance + final int value = (int) bin.readBits(bits - 1); + + // Add the implicit bit (1 << (bits - 1)) to the value read from the stream giving the distance. + // E.g. if bits is 6, we read 5 bits giving value 8 and then we add 32 giving a distance of 40. 
+ return value | (1 << (bits - 1)); + } + } +} diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java b/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java new file mode 100644 index 00000000000..615d9c0616e --- /dev/null +++ b/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.lha; + +import java.io.IOException; +import java.util.Arrays; + +import org.apache.commons.compress.compressors.CompressorException; +import org.apache.commons.compress.utils.BitInputStream; +import org.apache.commons.lang3.ArrayFill; + +/** + * Binary tree of positive values. + * + * Copied from org.apache.commons.compress.archivers.zip.BinaryTree and modified for LHA. + */ +class BinaryTree { + + /** Value in the array indicating an undefined node */ + private static final int UNDEFINED = -1; + + /** Value in the array indicating a non leaf node */ + private static final int NODE = -2; + + /** + * The array representing the binary tree. The root is at index 0, the left children (0) are at 2*i+1 and the right children (1) at 2*i+2. 
+ */ + private final int[] tree; + + /** + * Constructs a binary tree from the given array that contains the depth (code length) in the + * binary tree as values in the array and the index into the array as the value of the leaf node. + * + * If the array contains a single value, this is a special case where there is only one node in the + * tree (the root node) and it contains the value. For this case, the array contains the value of + * the root node instead of the depth in the tree. This special case also means that no bits will + * be read from the bit stream when the read method is called, as there are no children to traverse. + * + * @param array the array to build the binary tree from + */ + BinaryTree(final int[] array) { + if (array.length == 1) { + // Tree only contains a single value, which is the root node value + this.tree = new int[] { array[0] }; + return; + } + + // Determine the maximum depth of the tree from the input array + final int maxDepth = Arrays.stream(array).max().getAsInt(); + + // Allocate binary tree with enough space for all nodes + this.tree = initTree(maxDepth); + + int treePos = 0; + + // Add root node pointing to left (0) and right (1) children + this.tree[treePos++] = NODE; + + // Iterate over each possible tree depth (starting from 1) + for (int currentDepth = 1; currentDepth <= maxDepth; currentDepth++) { + final int startPos = (1 << currentDepth) - 1; // Start position for the first node at this depth + final int maxNodesAtCurrentDepth = 1 << currentDepth; // Max number of nodes at this depth + int numNodesAtCurrentDepth = treePos - startPos; // Number of nodes added at this depth taking into account any already skipped nodes (UNDEFINED) + + // Add leaf nodes for values with the current depth + for (int value = 0; value < array.length; value++) { + if (array[value] == currentDepth) { + this.tree[treePos++] = value; // Add leaf (value) node + numNodesAtCurrentDepth++; + } + } + + // Add nodes pointing to child nodes until the 
maximum number of nodes at this depth is reached + int skipToTreePos = -1; + while (currentDepth != maxDepth && numNodesAtCurrentDepth < maxNodesAtCurrentDepth) { + if (skipToTreePos == -1) { + skipToTreePos = 2 * treePos + 1; // Next depth's tree position that this node's left (0) child would occupy + } + + this.tree[treePos++] = NODE; // Add node pointing to left (0) and right (1) children + numNodesAtCurrentDepth++; + } + + if (skipToTreePos != -1) { + treePos = skipToTreePos; // Skip to the next depth's tree position based on the first node at this depth + } + } + } + + /** + * Initializes the binary tree with the specified depth but with all nodes as UNDEFINED. + * + * @param depth the depth of the tree, must be between 0 and 16 (inclusive) + * @return an array representing the binary tree, initialized with UNDEFINED values + */ + private int[] initTree(final int depth) { + if (depth < 0 || depth > 16) { + throw new IllegalArgumentException("Depth must not be negative and not bigger than 16 but is " + depth); + } + + final int arraySize = depth == 0 ? 1 : (int) ((1L << depth + 1) - 1); // Depth 0 has only a single node (the root) + + return ArrayFill.fill(new int[arraySize], UNDEFINED); + } + + /** + * Reads a value from the specified bit stream. + * + * @param stream The data source. + * @return the value decoded, or -1 if the end of the stream is reached + * @throws IOException on error. + */ + public int read(final BitInputStream stream) throws IOException { + int currentIndex = 0; + + while (true) { + final int value = tree[currentIndex]; + if (value == NODE) { + // Consume the next bit + final int bit = stream.readBit(); + if (bit == -1) { + return -1; + } + + currentIndex = 2 * currentIndex + 1 + bit; + } else if (value == UNDEFINED) { + throw new CompressorException("Invalid bitstream. 
The node at index %d is not defined.", currentIndex); + } else { + return value; + } + } + } +} diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java new file mode 100644 index 00000000000..9cf6154adcc --- /dev/null +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.lha; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Decompressor for lh4. It has a dictionary size of 4096 bytes. 
+ * + * @since 1.29 + */ +public class Lh4CompressorInputStream extends AbstractLhStaticHuffmanCompressorInputStream { + public Lh4CompressorInputStream(final InputStream in) throws IOException { + super(in); + } + + @Override + protected int getDictionaryBits() { + return 12; + } + + @Override + protected int getDistanceBits() { + return 4; + } + + @Override + protected int getDistanceCodeSize() { + return getDictionaryBits() + 2; + } +} diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java new file mode 100644 index 00000000000..9cdccdc2fa0 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.lha; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Decompressor for lh5. It has a dictionary size of 8192 bytes. 
+ * + * @since 1.29 + */ +public class Lh5CompressorInputStream extends AbstractLhStaticHuffmanCompressorInputStream { + public Lh5CompressorInputStream(final InputStream in) throws IOException { + super(in); + } + + @Override + protected int getDictionaryBits() { + return 13; + } + + @Override + protected int getDistanceBits() { + return 4; + } +} diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java new file mode 100644 index 00000000000..46deb8ecc4a --- /dev/null +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.lha; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Decompressor for lh6. It has a dictionary size of 32768 bytes. 
+ * + * @since 1.29 + */ +public class Lh6CompressorInputStream extends AbstractLhStaticHuffmanCompressorInputStream { + public Lh6CompressorInputStream(final InputStream in) throws IOException { + super(in); + } + + @Override + protected int getDictionaryBits() { + return 15; + } + + @Override + protected int getDistanceBits() { + return 5; + } +} diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java new file mode 100644 index 00000000000..7e40b661fd6 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.lha; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Decompressor for lh7. It has a dictionary size of 65536 bytes. 
+ * + * @since 1.29 + */ +public class Lh7CompressorInputStream extends AbstractLhStaticHuffmanCompressorInputStream { + public Lh7CompressorInputStream(final InputStream in) throws IOException { + super(in); + } + + @Override + protected int getDictionaryBits() { + return 16; + } + + @Override + protected int getDistanceBits() { + return 5; + } +} diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/package-info.java b/src/main/java/org/apache/commons/compress/compressors/lha/package-info.java new file mode 100644 index 00000000000..92d07a68dac --- /dev/null +++ b/src/main/java/org/apache/commons/compress/compressors/lha/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Provides stream classes for decompressing streams found in LHA archives. 
+ */ +package org.apache.commons.compress.compressors.lha; diff --git a/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java b/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java index 7f457640c51..e47834252ac 100644 --- a/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java +++ b/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java @@ -98,7 +98,7 @@ public int get() { * @param value the value to put. */ public void put(final int value) { - if(bytesAvailable == size) { + if (bytesAvailable == size) { throw new IllegalStateException("Buffer overflow: Cannot write to a full buffer"); } diff --git a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java index abba17db4a1..7ae9f5496e6 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java @@ -43,6 +43,7 @@ import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream; import org.apache.commons.compress.archivers.dump.DumpArchiveInputStream; import org.apache.commons.compress.archivers.jar.JarArchiveInputStream; +import org.apache.commons.compress.archivers.lha.LhaArchiveInputStream; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; import org.apache.commons.io.input.BrokenInputStream; @@ -96,6 +97,7 @@ public String toString() { */ private static final String ARJ_DEFAULT; private static final String DUMP_DEFAULT; + private static final String LHA_DEFAULT = getCharsetName(new LhaArchiveInputStream(null)); private static final String ZIP_DEFAULT = getCharsetName(new ZipArchiveInputStream(null)); private static final String CPIO_DEFAULT = getCharsetName(new CpioArchiveInputStream(null)); private static final String 
TAR_DEFAULT = getCharsetName(new TarArchiveInputStream(null)); @@ -138,6 +140,12 @@ public String toString() { new TestData("bla.dump", ArchiveStreamFactory.DUMP, false, StandardCharsets.UTF_8.name(), FACTORY_SET_UTF8, "charsetName"), new TestData("bla.dump", ArchiveStreamFactory.DUMP, false, StandardCharsets.US_ASCII.name(), FACTORY_SET_ASCII, "charsetName"), + new TestData("bla.lha", ArchiveStreamFactory.LHA, false, LHA_DEFAULT, FACTORY, "charsetName"), + new TestData("bla.lha", ArchiveStreamFactory.LHA, false, StandardCharsets.UTF_8.name(), FACTORY_UTF8, "charsetName"), + new TestData("bla.lha", ArchiveStreamFactory.LHA, false, StandardCharsets.US_ASCII.name(), FACTORY_ASCII, "charsetName"), + new TestData("bla.lha", ArchiveStreamFactory.LHA, false, StandardCharsets.UTF_8.name(), FACTORY_SET_UTF8, "charsetName"), + new TestData("bla.lha", ArchiveStreamFactory.LHA, false, StandardCharsets.US_ASCII.name(), FACTORY_SET_ASCII, "charsetName"), + new TestData("bla.tar", ArchiveStreamFactory.TAR, true, TAR_DEFAULT, FACTORY, "charsetName"), new TestData("bla.tar", ArchiveStreamFactory.TAR, true, StandardCharsets.UTF_8.name(), FACTORY_UTF8, "charsetName"), new TestData("bla.tar", ArchiveStreamFactory.TAR, true, StandardCharsets.US_ASCII.name(), FACTORY_ASCII, "charsetName"), @@ -260,7 +268,7 @@ void testDetect() throws Exception { for (final String extension : new String[] { ArchiveStreamFactory.AR, ArchiveStreamFactory.ARJ, ArchiveStreamFactory.CPIO, ArchiveStreamFactory.DUMP, // Compress doesn't know how to detect JARs, see COMPRESS-91 // ArchiveStreamFactory.JAR, - ArchiveStreamFactory.SEVEN_Z, ArchiveStreamFactory.TAR, ArchiveStreamFactory.ZIP }) { + ArchiveStreamFactory.LHA, ArchiveStreamFactory.SEVEN_Z, ArchiveStreamFactory.TAR, ArchiveStreamFactory.ZIP }) { assertEquals(extension, detect("bla." 
+ extension)); } diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/CRC16Test.java b/src/test/java/org/apache/commons/compress/archivers/lha/CRC16Test.java new file mode 100644 index 00000000000..1407b44ab79 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/lha/CRC16Test.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.archivers.lha; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.nio.charset.StandardCharsets; +import java.util.zip.Checksum; + +import org.junit.jupiter.api.Test; + +class CRC16Test { + @Test + void testCRC16() { + final Checksum crc16 = new CRC16(); + crc16.update("123456789".getBytes(StandardCharsets.US_ASCII), 0, 9); + assertEquals(0xbb3d, crc16.getValue()); + } +} diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java new file mode 100644 index 00000000000..0c51e59a107 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.commons.compress.archivers.lha;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.Date;
import java.util.Optional;

import org.junit.jupiter.api.Test;

/**
 * Tests {@link LhaArchiveEntry#toString()} with the minimal field set and with every optional field populated.
 */
class LhaArchiveEntryTest {

    /** Builds an entry populated with the mandatory fields shared by both tests. */
    private static LhaArchiveEntry newEntry(final String name) {
        final LhaArchiveEntry entry = new LhaArchiveEntry();
        entry.setName(name);
        entry.setDirectory(false);
        entry.setSize(57);
        entry.setLastModifiedDate(new Date(1754236942000L)); // 2025-08-03T16:02:22Z
        entry.setCompressedSize(52);
        entry.setCompressionMethod("-lh5-");
        entry.setCrcValue(0x6496);
        return entry;
    }

    @Test
    void testToStringMinimal() {
        final LhaArchiveEntry entry = newEntry("test1.txt");

        assertEquals("LhaArchiveEntry[name=test1.txt,directory=false,size=57,lastModifiedDate=2025-08-03T16:02:22Z,compressedSize=52,"
                + "compressionMethod=-lh5-,crcValue=0x6496]", entry.toString());
    }

    @Test
    void testToStringAllFields() {
        final LhaArchiveEntry entry = newEntry("dir1/test1.txt");
        entry.setOsId(Optional.of(85));
        entry.setUnixPermissionMode(Optional.of(0100644));
        entry.setUnixGroupId(Optional.of(20));
        entry.setUnixUserId(Optional.of(501));
        entry.setMsdosFileAttributes(Optional.of(0x0010));
        entry.setHeaderCrc(Optional.of(0xb772));

        assertEquals(
                "LhaArchiveEntry[name=dir1/test1.txt,directory=false,size=57,lastModifiedDate=2025-08-03T16:02:22Z,compressedSize=52,"
                        + "compressionMethod=-lh5-,crcValue=0x6496,osId=85,unixPermissionMode=100644,msdosFileAttributes=0010,headerCrc=0xb772]",
                entry.toString());
    }
}
b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -0,0 +1,1381 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.lha; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.apache.commons.compress.AbstractTest; +import org.apache.commons.compress.archivers.ArchiveException; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.Test; + +class LhaArchiveInputStreamTest extends AbstractTest { + private static final int[] 
VALID_HEADER_LEVEL_0_FILE = new int[] { + 0x2b, 0x70, 0x2d, 0x6c, 0x68, 0x35, 0x2d, 0x34, 0x00, 0x00, 0x00, 0x39, 0x00, 0x00, 0x00, 0x4b, // |+p-lh5-4...9...K| + 0x80, 0x03, 0x5b, 0x20, 0x00, 0x09, 0x74, 0x65, 0x73, 0x74, 0x31, 0x2e, 0x74, 0x78, 0x74, 0x96, // |..[ ..test1.txt.| + 0x64, 0x55, 0x00, 0xef, 0x6b, 0x8f, 0x68, 0xa4, 0x81, 0xf5, 0x01, 0x14, 0x00, 0x00, 0x39, 0x4a, // |dU..k.h.......9J| + 0x8e, 0x8d, 0x33, 0xb7, 0x3e, 0x80, 0x1f, 0xe8, 0x4d, 0x01, 0x3a, 0x00, 0x12, 0xb4, 0xc7, 0x83, // |..3.>...M.:.....| + 0x5a, 0x8d, 0xf4, 0x03, 0xe9, 0xe3, 0xc0, 0x3b, 0xae, 0xc0, 0xc4, 0xe6, 0x78, 0x28, 0xa1, 0x78, // |Z......;....x(.x| + 0x75, 0x60, 0xd3, 0xaa, 0x76, 0x4e, 0xbb, 0xc1, 0x7c, 0x1d, 0x9a, 0x63, 0xaf, 0xc3, 0xe4, 0xaf, // |u`..vN..|..c....| + 0x7c, 0x00 // ||.| + }; + + private static final int[] VALID_HEADER_LEVEL_0_FILE_MACOS_UTF8 = new int[] { + 0x31, 0x65, 0x2d, 0x6c, 0x68, 0x30, 0x2d, 0x0d, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x06, // |1e-lh0-.........| + 0x8c, 0x0d, 0x5b, 0x20, 0x00, 0x0f, 0x74, 0x65, 0x73, 0x74, 0x2d, 0xc3, 0xa5, 0xc3, 0xa4, 0xc3, // |..[ ..test-.....| + 0xb6, 0x2e, 0x74, 0x78, 0x74, 0x57, 0x77, 0x55, 0x00, 0xfc, 0xaf, 0x9c, 0x68, 0xa4, 0x81, 0xf5, // |..txtWwU....h...| + 0x01, 0x14, 0x00, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21, 0x0a, // |...Hello World!.| + 0x00 // |.| + }; + + private static final int[] VALID_HEADER_LEVEL_0_FILE_MSDOS_ISO8859_1 = new int[] { + 0x22, 0x6b, 0x2d, 0x6c, 0x68, 0x30, 0x2d, 0x0e, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x52, // |"k-lh0-........R| + 0x54, 0x0d, 0x5b, 0x20, 0x00, 0x0c, 0x74, 0x65, 0x73, 0x74, 0x2d, 0xe5, 0xe4, 0xf6, 0x2e, 0x74, // |T.[ ..test-....t| + 0x78, 0x74, 0xb4, 0xc9, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21, // |xt..Hello World!| + 0x0d, 0x0a, 0x00 // |...| + }; + + private static final int[] VALID_HEADER_LEVEL_1_FILE = new int[] { + 0x22, 0x09, 0x2d, 0x6c, 0x68, 0x35, 0x2d, 0x47, 0x00, 0x00, 0x00, 0x39, 0x00, 
0x00, 0x00, 0x4b, // |".-lh5-G...9...K| + 0x80, 0x03, 0x5b, 0x20, 0x01, 0x09, 0x74, 0x65, 0x73, 0x74, 0x31, 0x2e, 0x74, 0x78, 0x74, 0x96, // |..[ ..test1.txt.| + 0x64, 0x55, 0x05, 0x00, 0x50, 0xa4, 0x81, 0x07, 0x00, 0x51, 0x14, 0x00, 0xf5, 0x01, 0x07, 0x00, // |dU..P....Q......| + 0x54, 0xef, 0x6b, 0x8f, 0x68, 0x00, 0x00, 0x00, 0x39, 0x4a, 0x8e, 0x8d, 0x33, 0xb7, 0x3e, 0x80, // |T.k.h...9J..3.>.| + 0x1f, 0xe8, 0x4d, 0x01, 0x3a, 0x00, 0x12, 0xb4, 0xc7, 0x83, 0x5a, 0x8d, 0xf4, 0x03, 0xe9, 0xe3, // |..M.:.....Z.....| + 0xc0, 0x3b, 0xae, 0xc0, 0xc4, 0xe6, 0x78, 0x28, 0xa1, 0x78, 0x75, 0x60, 0xd3, 0xaa, 0x76, 0x4e, // |.;....x(.xu`..vN| + 0xbb, 0xc1, 0x7c, 0x1d, 0x9a, 0x63, 0xaf, 0xc3, 0xe4, 0xaf, 0x7c, 0x00 // |..|..c....|.| + }; + + private static final int[] VALID_HEADER_LEVEL_1_FILE_MSDOS_WITH_CHECKSUM_AND_CRC = new int[] { + 0x19, 0x36, 0x2d, 0x6c, 0x68, 0x64, 0x2d, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3d, // |.6-lhd-........=| + 0x77, 0x0d, 0x5b, 0x20, 0x01, 0x00, 0x00, 0x00, 0x4d, 0x08, 0x00, 0x02, 0x64, 0x69, 0x72, 0x31, // |w.[ ....M...dir1| + 0xff, 0x05, 0x00, 0x40, 0x10, 0x00, 0x05, 0x00, 0x00, 0x72, 0xb7, 0x00, 0x00, 0x22, 0x10, 0x2d, // |...@.....r...".-| + 0x6c, 0x68, 0x30, 0x2d, 0x1b, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x52, 0x54, 0x0d, 0x5b, // |lh0-........RT.[| + 0x20, 0x01, 0x09, 0x74, 0x65, 0x73, 0x74, 0x31, 0x2e, 0x74, 0x78, 0x74, 0xb4, 0xc9, 0x4d, 0x08, // | ..test1.txt..M.| + 0x00, 0x02, 0x64, 0x69, 0x72, 0x31, 0xff, 0x05, 0x00, 0x00, 0x71, 0x9b, 0x00, 0x00, 0x48, 0x65, // |..dir1....q...He| + 0x6c, 0x6c, 0x6f, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21, 0x0d, 0x0a, 0x00 // |llo World!...| + }; + + private static final int[] VALID_HEADER_LEVEL_2_FILE = new int[] { + 0x37, 0x00, 0x2d, 0x6c, 0x68, 0x35, 0x2d, 0x34, 0x00, 0x00, 0x00, 0x39, 0x00, 0x00, 0x00, 0xef, // |7.-lh5-4...9....| + 0x6b, 0x8f, 0x68, 0x20, 0x02, 0x96, 0x64, 0x55, 0x05, 0x00, 0x00, 0xa5, 0x01, 0x0c, 0x00, 0x01, // |k.h ..dU........| + 0x74, 0x65, 0x73, 0x74, 
0x31, 0x2e, 0x74, 0x78, 0x74, 0x05, 0x00, 0x50, 0xa4, 0x81, 0x07, 0x00, // |test1.txt..P....| + 0x51, 0x14, 0x00, 0xf5, 0x01, 0x00, 0x00, 0x00, 0x39, 0x4a, 0x8e, 0x8d, 0x33, 0xb7, 0x3e, 0x80, // |Q.......9J..3.>.| + 0x1f, 0xe8, 0x4d, 0x01, 0x3a, 0x00, 0x12, 0xb4, 0xc7, 0x83, 0x5a, 0x8d, 0xf4, 0x03, 0xe9, 0xe3, // |..M.:.....Z.....| + 0xc0, 0x3b, 0xae, 0xc0, 0xc4, 0xe6, 0x78, 0x28, 0xa1, 0x78, 0x75, 0x60, 0xd3, 0xaa, 0x76, 0x4e, // |.;....x(.xu`..vN| + 0xbb, 0xc1, 0x7c, 0x1d, 0x9a, 0x63, 0xaf, 0xc3, 0xe4, 0xaf, 0x7c, 0x00 // |..|..c....|.| + }; + + @Test + void testInvalidHeaderLevelLength() throws IOException { + final byte[] data = new byte[] { 0x04, 0x00, 0x00, 0x00, 0x00, 0x00 }; + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header length"); + } catch (ArchiveException e) { + assertEquals("Invalid header length", e.getMessage()); + } + } + + @Test + void testInvalidHeaderLevel() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_0_FILE); + + data[20] = 4; // Change the header level to an invalid value + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header level"); + } catch (ArchiveException e) { + assertEquals("Invalid header level: 4", e.getMessage()); + } + } + + @Test + void testUnsupportedCompressionMethod() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_0_FILE); + + data[1] = (byte) 0x9c; // Change the header checksum + data[5] = 'a'; // Change the compression method to an unsupported value + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("-lha-", entry.getCompressionMethod()); + + 
assertFalse(archive.canReadEntryData(entry)); + + try { + IOUtils.toByteArray(archive); + fail("Expected ArchiveException for unsupported compression method"); + } catch (ArchiveException e) { + assertEquals("Unsupported compression method: -lha-", e.getMessage()); + } + } + } + + @Test + void testReadDataBeforeEntry() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_0_FILE); + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try { + IOUtils.toByteArray(archive); + fail("Expected IllegalStateException for reading data before entry"); + } catch (IllegalStateException e) { + assertEquals("No current entry", e.getMessage()); + } + } + } + + @Test + void testParseHeaderLevel0File() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE)))) { + // Entry should be parsed correctly + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(57, entry.getSize()); + assertEquals(1754236942000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-03T16:02:22Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(52, entry.getCompressedSize()); + assertEquals("-lh5-", entry.getCompressionMethod()); + assertEquals(0x6496, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel0FileMacosUtf8() throws 
IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MACOS_UTF8)), "UTF-8")) { + // Entry name should be parsed correctly + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("test-\u00E5\u00E4\u00F6.txt", entry.getName()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel0FileMsdosIso88591() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MSDOS_ISO8859_1)), + "ISO-8859-1")) { + // Entry name should be parsed correctly + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("test-\u00E5\u00E4\u00F6.txt", entry.getName()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel0FileMsdosIso88591DefaultEncoding() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MSDOS_ISO8859_1)))) { + // First entry should be with replacement characters for unsupported characters + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("test-\uFFFD\uFFFD\uFFFD.txt", entry.getName()); // Unicode replacement characters for unsupported characters + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testInvalidHeaderLevel0Length() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_0_FILE); + + data[0] = 0x10; // Change the first byte to an invalid length + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header length"); + } catch (ArchiveException e) { + assertEquals("Invalid 
header level 0 length: 16", e.getMessage()); + } + } + + @Test + void testInvalidHeaderLevel0Checksum() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_0_FILE); + + data[1] = 0x55; // Change the second byte to an invalid header checksum + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header checksum"); + } catch (ArchiveException e) { + assertEquals("Invalid header level 0 checksum", e.getMessage()); + } + } + + @Test + void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { + // The lha file was generated by LHa for UNIX version 1.14i-ac20211125 for Macos + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0.lha"), null, '/')) { + LhaArchiveEntry entry; + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755090690000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T13:11:30Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + 
assertEquals(1755090728000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T13:12:08Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(13, entry.getSize()); + assertEquals(1755090728000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T13:12:08Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(13, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0x7757, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755090812000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T13:13:32Z"), 
convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/test2.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(13, entry.getSize()); + assertEquals(1755090812000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T13:13:32Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(13, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0x7757, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { + // The lha file was generated by LHA32 v2.67.00 for Windows + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-msdos-l0.lha"), null, '/')) { + LhaArchiveEntry entry; + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + 
assertEquals(1755081308000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:08Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755081336000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:36Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), 
convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755081340000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:40Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/test2.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + 
assertEquals(0xc9b4, entry.getCrcValue()); + assertFalse(entry.getOsId().isPresent()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel1File() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_1_FILE)))) { + // Entry should be parsed correctly + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(57, entry.getSize()); + assertEquals(1754229743000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-03T14:02:23Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(52, entry.getCompressedSize()); + assertEquals("-lh5-", entry.getCompressionMethod()); + assertEquals(0x6496, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(0100644, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel1FileMsdosChecksumAndCrc() throws IOException { + // The lha file was generated by LHA32 v2.67.00 for Windows + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream( + toByteArray(VALID_HEADER_LEVEL_1_FILE_MSDOS_WITH_CHECKSUM_AND_CRC)), null, '/')) { + LhaArchiveEntry entry; + + // Check directory entry + entry = 
archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755097078000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T14:57:58Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertEquals(0x0010, entry.getMsdosFileAttributes().get()); + assertEquals(0xb772, entry.getHeaderCrc().get()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x9b71, entry.getHeaderCrc().get()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testInvalidHeaderLevel1Length() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_1_FILE); + + data[0] = 
0x10; // Change the first byte to an invalid length + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header length"); + } catch (ArchiveException e) { + assertEquals("Invalid header level 1 length: 16", e.getMessage()); + } + } + + @Test + void testInvalidHeaderLevel1Checksum() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_1_FILE); + + data[1] = 0x55; // Change the second byte to an invalid header checksum + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header checksum"); + } catch (ArchiveException e) { + assertEquals("Invalid header level 1 checksum", e.getMessage()); + } + } + + @Test + void testInvalidHeaderLevel1Crc() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_1_FILE_MSDOS_WITH_CHECKSUM_AND_CRC); + + // Change header CRC to an invalid value + data[41] = 0x33; + data[42] = 0x22; + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header checksum"); + } catch (ArchiveException e) { + assertEquals("Invalid header CRC expected=0xb772 found=0x2233", e.getMessage()); + } + } + + @Test + void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { + // The lha file was generated by LHa for UNIX version 1.14i-ac20211125 for Macos + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l1.lha"), null, '/')) { + LhaArchiveEntry entry; + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755083490000L, entry.getLastModifiedDate().getTime()); + 
assertEquals(ZonedDateTime.parse("2025-08-13T11:11:30Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(040755, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755083529000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:12:09Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(040755, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(13, entry.getSize()); + assertEquals(1755083529000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:12:09Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(13, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0x7757, entry.getCrcValue()); + 
assertEquals(85, entry.getOsId().get()); + assertEquals(0100644, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755083612000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:13:32Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(040755, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertFalse(entry.getHeaderCrc().isPresent()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/test2.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(13, entry.getSize()); + assertEquals(1755083612000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:13:32Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(13, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0x7757, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(0100644, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + 
assertFalse(entry.getHeaderCrc().isPresent()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel1FileWithFoldersMsdos() throws IOException { + // The lha file was generated by LHA32 v2.67.00 for Windows + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-msdos-l1.lha"), null, '/')) { + LhaArchiveEntry entry; + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755081308000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:08Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertEquals(0x0010, entry.getMsdosFileAttributes().get()); + assertEquals(0xd458, entry.getHeaderCrc().get()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755081336000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:36Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + 
assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertEquals(0x0010, entry.getMsdosFileAttributes().get()); + assertEquals(0x40de, entry.getHeaderCrc().get()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x34b0, entry.getHeaderCrc().get()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755081340000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:40Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + 
assertEquals(0x0010, entry.getMsdosFileAttributes().get()); + assertEquals(0x21b2, entry.getHeaderCrc().get()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/test2.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x8f0c, entry.getHeaderCrc().get()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel2File() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_2_FILE)), null, '/')) { + // Entry should be parsed correctly + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(57, entry.getSize()); + assertEquals(1754229743000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-03T14:02:23Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(52, entry.getCompressedSize()); + assertEquals("-lh5-", entry.getCompressionMethod()); + assertEquals(0x6496, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(0100644, 
entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x01a5, entry.getHeaderCrc().get()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testInvalidHeaderLevel2Length() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_2_FILE); + + data[0] = 0x10; // Change the first byte to an invalid length + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header length"); + } catch (ArchiveException e) { + assertEquals("Invalid header level 2 length: 16", e.getMessage()); + } + } + + @Test + void testInvalidHeaderLevel2Checksum() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_2_FILE); + + // Change header CRC to an invalid value + data[27] = 0x33; + data[28] = 0x22; + + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid header checksum"); + } catch (ArchiveException e) { + assertEquals("Invalid header CRC expected=0x01a5 found=0x2233", e.getMessage()); + } + } + + @Test + void testParseHeaderLevel2FileWithFoldersAmiga() throws IOException { + // The lha file was generated by LhA 2.15 on Amiga + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-amiga-l2.lha"), null, '/')) { + LhaArchiveEntry entry; + + // No -lhd- directory entries in Amiga LHA files, so we expect only file entries + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, 
entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertEquals(65, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0xe1a5, entry.getHeaderCrc().get()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/test2.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertEquals(65, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0xd6b0, entry.getHeaderCrc().get()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel2FileWithFoldersMacos() throws IOException { + // The lha file was generated by LHa for UNIX version 1.14i-ac20211125 for Macos + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l2.lha"), null, '/')) { + LhaArchiveEntry entry; + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/", 
entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755083490000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:11:30Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(040755, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0xf3f7, entry.getHeaderCrc().get()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755083529000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:12:09Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(040755, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x50d3, entry.getHeaderCrc().get()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(13, entry.getSize()); + assertEquals(1755083529000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:12:09Z"), 
entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(13, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0x7757, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(0100644, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x589e, entry.getHeaderCrc().get()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755083612000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:13:32Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + assertEquals(040755, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x126d, entry.getHeaderCrc().get()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/test2.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(13, entry.getSize()); + assertEquals(1755083612000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T11:13:32Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(13, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0x7757, entry.getCrcValue()); + assertEquals(85, entry.getOsId().get()); + 
assertEquals(0100644, entry.getUnixPermissionMode().get()); + assertEquals(20, entry.getUnixGroupId().get()); + assertEquals(501, entry.getUnixUserId().get()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0xdbdd, entry.getHeaderCrc().get()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel2FileWithFoldersMsdos() throws IOException { + // The lha file was generated by LHA32 v2.67.00 for Windows + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-msdos-l2.lha"), null, '/')) { + LhaArchiveEntry entry; + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755081308000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:08Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertEquals(0x0010, entry.getMsdosFileAttributes().get()); + assertEquals(0x496a, entry.getHeaderCrc().get()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755081336000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:36Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + 
assertEquals(0x0000, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertEquals(0x0010, entry.getMsdosFileAttributes().get()); + assertEquals(0xebe7, entry.getHeaderCrc().get()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-1/test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x214a, entry.getHeaderCrc().get()); + + // Check directory entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/", entry.getName()); + assertTrue(entry.isDirectory()); + assertEquals(0, entry.getSize()); + assertEquals(1755081341000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:35:41Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(0, entry.getCompressedSize()); + assertEquals("-lhd-", entry.getCompressionMethod()); + assertEquals(0x0000, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + 
assertEquals(0x0010, entry.getMsdosFileAttributes().get()); + assertEquals(0x74ca, entry.getHeaderCrc().get()); + + // Check file entry + entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("dir1/dir1-2/test2.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertFalse(entry.getMsdosFileAttributes().isPresent()); + assertEquals(0x165f, entry.getHeaderCrc().get()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseHeaderLevel2FileWithMsdosAttributes() throws IOException { + // The lha file was generated by LHA32 v2.67.00 for Windows + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-msdos-l2-attrib.lha"))) { + // Check file entry + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("test1.txt", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1755081276000L, entry.getLastModifiedDate().getTime()); + assertEquals(ZonedDateTime.parse("2025-08-13T10:34:36Z"), entry.getLastModifiedDate().toInstant().atZone(ZoneOffset.UTC)); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lh0-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertEquals(77, entry.getOsId().get()); + assertFalse(entry.getUnixPermissionMode().isPresent()); + 
assertFalse(entry.getUnixGroupId().isPresent()); + assertFalse(entry.getUnixUserId().isPresent()); + assertEquals(0x0021, entry.getMsdosFileAttributes().get()); + assertEquals(0x14bb, entry.getHeaderCrc().get()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + + @Test + void testParseExtendedHeaderCommon() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + final LhaArchiveEntry entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x00, 0x22, 0x33, 0x00, 0x00), entry); + assertEquals(0x3322, entry.getHeaderCrc().get()); + } + } + + @Test + void testParseExtendedHeaderFilename() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + final LhaArchiveEntry entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); + assertEquals("test.txt", entry.getName()); + } + } + + @Test + void testParseExtendedHeaderDirectoryName() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '/')) { + final LhaArchiveEntry entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entry); + assertEquals("dir1/", entry.getName()); + } + } + + @Test + void testParseExtendedHeaderFilenameAndDirectoryName() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '/')) { + LhaArchiveEntry entry; + + // Test filename and directory name order + entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entry); + 
assertEquals("dir1/test.txt", entry.getName()); + + // Test filename and directory name order, no trailing slash + entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0x00, 0x00), entry); + assertEquals("dir1/test.txt", entry.getName()); + + // Test directory name and filename order + entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entry); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); + assertEquals("dir1/test.txt", entry.getName()); + + // Test directory name and filename order, no trailing slash + entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0x00, 0x00), entry); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); + assertEquals("dir1/test.txt", entry.getName()); + } + } + + @Test + void testParseExtendedHeaderUnixPermission() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + final LhaArchiveEntry entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x50, 0xa4, 0x81, 0x00, 0x00), entry); + assertEquals(0x81a4, entry.getUnixPermissionMode().get()); + assertEquals(0100644, entry.getUnixPermissionMode().get()); + } + } + + @Test + void testParseExtendedHeaderUnixUidGid() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + final LhaArchiveEntry entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x51, 0x14, 0x00, 0xf5, 0x01, 0x00, 0x00), entry); + assertEquals(0x0014, entry.getUnixGroupId().get()); + assertEquals(0x01f5, entry.getUnixUserId().get()); + } + } + + @Test + 
void testParseExtendedHeaderUnixTimestamp() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + final LhaArchiveEntry entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x54, 0x5c, 0x73, 0x9c, 0x68, 0x00, 0x00), entry); + assertEquals(0x689c735cL, entry.getLastModifiedDate().getTime() / 1000); + } + } + + @Test + void testParseExtendedHeaderMSdosFileAttributes() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + final LhaArchiveEntry entry = new LhaArchiveEntry(); + archive.parseExtendedHeader(toByteBuffer(0x40, 0x10, 0x00, 0x00), entry); + assertEquals(0x10, entry.getMsdosFileAttributes().get()); + } + } + + @Test + void testDecompressLh0() throws Exception { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0.lha"), null, File.separatorChar)) { + final List files = new ArrayList<>(); + files.add("dir1" + File.separatorChar); + files.add("dir1" + File.separatorChar + "dir1-1" + File.separatorChar); + files.add("dir1" + File.separatorChar + "dir1-1" + File.separatorChar + "test1.txt"); + files.add("dir1" + File.separatorChar + "dir1-2" + File.separatorChar); + files.add("dir1" + File.separatorChar + "dir1-2" + File.separatorChar + "test2.txt"); + checkArchiveContent(archive, files); + } + } + + @Test + void testDecompressLh4() throws Exception { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-amiga-l0-lh4.lha"))) { + final List files = new ArrayList<>(); + files.add("lorem-ipsum.txt"); + checkArchiveContent(archive, files); + } + } + + @Test + void testDecompressLh5() throws Exception { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh5.lha"))) { + final List files = new ArrayList<>(); + files.add("lorem-ipsum.txt"); + checkArchiveContent(archive, files); + } + 
} + + /** + * Test decompressing a file with lh5 compression that contains only one character and thus is + * basically RLE encoded. The distance tree contains only one entry (root node). + */ + @Test + void testDecompressLh5Rle() throws Exception { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh5-rle.lha"))) { + final List files = new ArrayList<>(); + files.add("rle.txt"); + checkArchiveContent(archive, files); + } + } + + @Test + void testDecompressLh6() throws Exception { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh6.lha"))) { + final List files = new ArrayList<>(); + files.add("lorem-ipsum.txt"); + checkArchiveContent(archive, files); + } + } + + @Test + void testDecompressLh7() throws Exception { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh7.lha"))) { + final List files = new ArrayList<>(); + files.add("lorem-ipsum.txt"); + checkArchiveContent(archive, files); + } + } + + @Test + void testMatches() { + byte[] data; + + assertTrue(LhaArchiveInputStream.matches(toByteArray(VALID_HEADER_LEVEL_0_FILE), VALID_HEADER_LEVEL_0_FILE.length)); + assertTrue(LhaArchiveInputStream.matches(toByteArray(VALID_HEADER_LEVEL_1_FILE), VALID_HEADER_LEVEL_1_FILE.length)); + assertTrue(LhaArchiveInputStream.matches(toByteArray(VALID_HEADER_LEVEL_2_FILE), VALID_HEADER_LEVEL_2_FILE.length)); + + // Header too short + data = toByteArray(0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09); + assertFalse(LhaArchiveInputStream.matches(data, data.length)); + + // Change the header level to an invalid value + data = toByteArray(VALID_HEADER_LEVEL_0_FILE); + data[20] = 4; + assertFalse(LhaArchiveInputStream.matches(data, data.length)); + + // Change the compression method to an invalid value + data = toByteArray(VALID_HEADER_LEVEL_0_FILE); + data[6] = 0x08; + assertFalse(LhaArchiveInputStream.matches(data, data.length)); + } + + 
@Test + void testGetCompressionMethod() throws IOException { + assertEquals("-lh0-", LhaArchiveInputStream.getCompressionMethod(ByteBuffer.wrap(toByteArray(0x00, 0x00, '-', 'l', 'h', '0', '-')))); + assertEquals("-lhd-", LhaArchiveInputStream.getCompressionMethod(ByteBuffer.wrap(toByteArray(0x00, 0x00, '-', 'l', 'h', 'd', '-')))); + + try { + LhaArchiveInputStream.getCompressionMethod(ByteBuffer.wrap(toByteArray(0x00, 0x00, '-', 'l', 'h', '0', 0xff))); + fail("Expected ArchiveException for invalid compression method"); + } catch (ArchiveException e) { + assertEquals("Invalid compression method: 0x2d 0x6c 0x68 0x30 0xff", e.getMessage()); + } + } + + @Test + void testGetPathnameUnixFileSeparatorCharDefaultEncoding() throws IOException, UnsupportedEncodingException { + try (LhaArchiveInputStream is = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '/')) { + assertEquals("folder/", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff)); + assertEquals("folder/file.txt", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); + assertEquals("folder/file.txt", getPathname(is, 0xff, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); + assertEquals("folder/file.txt", getPathname(is, '\\', 'f', 'o', 'l', 'd', 'e', 'r', '\\', 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); + + // Unicode replacement characters for unsupported characters + assertEquals("\uFFFD/\uFFFD/\uFFFD.txt", getPathname(is, 0xe5, 0xff, 0xe4, 0xff, 0xf6, '.', 't', 'x', 't')); + assertEquals("\uFFFD/\uFFFD/\uFFFD.txt", getPathname(is, 0xe5, '\\', 0xe4, '\\', 0xf6, '.', 't', 'x', 't')); + } + } + + @Test + void testGetPathnameUnixFileSeparatorCharIso88591() throws IOException, UnsupportedEncodingException { + try (LhaArchiveInputStream is = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), "ISO-8859-1", '/')) { + assertEquals("\u00E5/\u00E4/\u00F6.txt", getPathname(is, 0xe5, 0xff, 0xe4, 0xff, 0xf6, '.', 't', 'x', 
't')); + assertEquals("\u00E5/\u00E4/\u00F6.txt", getPathname(is, 0xe5, '\\', 0xe4, '\\', 0xf6, '.', 't', 'x', 't')); + } + } + + @Test + void testGetPathnameWindowsFileSeparatorCharDefaultEncoding() throws IOException, UnsupportedEncodingException { + try (LhaArchiveInputStream is = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '\\')) { + assertEquals("folder\\", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff)); + assertEquals("folder\\file.txt", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); + assertEquals("folder\\file.txt", getPathname(is, 0xff, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); + assertEquals("folder\\file.txt", getPathname(is, '\\', 'f', 'o', 'l', 'd', 'e', 'r', '\\', 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); + + // Unicode replacement characters for unsupported characters + assertEquals("\uFFFD\\\uFFFD\\\uFFFD.txt", getPathname(is, 0xe5, 0xff, 0xe4, 0xff, 0xf6, '.', 't', 'x', 't')); + assertEquals("\uFFFD\\\uFFFD\\\uFFFD.txt", getPathname(is, 0xe5, '\\', 0xe4, '\\', 0xf6, '.', 't', 'x', 't')); + } + } + + @Test + void testGetPathnameWindowsFileSeparatorCharIso88591() throws IOException, UnsupportedEncodingException { + try (LhaArchiveInputStream is = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), "ISO-8859-1", '\\')) { + assertEquals("\u00E5\\\u00E4\\\u00F6.txt", getPathname(is, 0xe5, 0xff, 0xe4, 0xff, 0xf6, '.', 't', 'x', 't')); + assertEquals("\u00E5\\\u00E4\\\u00F6.txt", getPathname(is, 0xe5, '\\', 0xe4, '\\', 0xf6, '.', 't', 'x', 't')); + } + } + + private static byte[] toByteArray(final int... data) { + final byte[] bytes = new byte[data.length]; + for (int i = 0; i < data.length; i++) { + bytes[i] = (byte) data[i]; + } + return bytes; + } + + private static ByteBuffer toByteBuffer(final int... 
data) { + return ByteBuffer.wrap(toByteArray(data)).order(ByteOrder.LITTLE_ENDIAN); + } + + private String getPathname(final LhaArchiveInputStream is, final int... filepathBuffer) throws UnsupportedEncodingException { + return is.getPathname(ByteBuffer.wrap(toByteArray(filepathBuffer)), filepathBuffer.length); + } + + /** + * The timestamp used in header level 0 and 1 entries has no time zone information and is + * converted in the system default time zone. This method converts the date to UTC to verify + * the timestamp in unit tests. + * + * @param date the date to convert + * @return a ZonedDateTime in UTC + */ + private ZonedDateTime convertSystemTimeZoneDateToUTC(final Date date) { + return date.toInstant().atZone(ZoneId.systemDefault()).withZoneSameLocal(ZoneOffset.UTC); + } +} diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStreamTest.java new file mode 100644 index 00000000000..278090d7763 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStreamTest.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.lha; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.ByteOrder; + +import org.apache.commons.compress.compressors.CompressorException; +import org.apache.commons.compress.utils.BitInputStream; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.Test; + +class AbstractLhStaticHuffmanCompressorInputStreamTest { + @Test + void testInputStreamStatistics() throws IOException { + final int[] compressedData = { + 0x00, 0x05, 0x28, 0x04, 0x4b, 0xfc, 0x16, 0xed, + 0x37, 0x00, 0x43, 0x00 + }; + + try (Lh5CompressorInputStream in = createLh5CompressorInputStream(compressedData)) { + final byte[] decompressedData = IOUtils.toByteArray(in); + + assertEquals(1024, decompressedData.length); + for (int i = 0; i < decompressedData.length; i++) { + assertEquals('A', decompressedData[i], "Byte at position " + i); + } + + assertEquals(12, in.getCompressedCount()); + assertEquals(1024, in.getUncompressedCount()); + } + } + + @Test + void testReadCommandDecodingTreeWithSingleValue() throws IOException { + final BinaryTree tree = createLh5CompressorInputStream( + 0b00000000, 0b00111111 // 5 bits length (0x00) and 5 bits the root value (0x00) + ).readCommandDecodingTree(); + + assertEquals(0, tree.read(new BitInputStream(new ByteArrayInputStream(new byte[0]), ByteOrder.BIG_ENDIAN))); + } + + @Test + void testReadCommandDecodingTreeWithInvalidSize() throws IOException { + try { + createLh5CompressorInputStream( + 0b10100000, 0b00000000 // 5 bits length (0x14 = 20) + ).readCommandDecodingTree(); + + fail("Expected CompressorException for table invalid size"); + } catch (CompressorException e) { + assertEquals("Code length table has invalid size (20 > 19)", 
e.getMessage()); + } + } + + @Test + void testReadCommandTreeWithSingleValue() throws IOException { + final BinaryTree tree = createLh5CompressorInputStream( + 0b00000000, 0b01111111, 0b01000000 // 9 bits length (0x00) and 9 bits the root value (0x01fd = 509) + ).readCommandTree(new BinaryTree(new int [] { 0 })); + + assertEquals(0x01fd, tree.read(new BitInputStream(new ByteArrayInputStream(new byte[0]), ByteOrder.BIG_ENDIAN))); + } + + @Test + void testReadCommandTreeWithInvalidSize() throws IOException { + try { + createLh5CompressorInputStream( + 0b11111111, 0b10000000 // 9 bits length (0x01ff = 511) + ).readCommandTree(new BinaryTree(new int [] { 0 })); + + fail("Expected CompressorException for table invalid size"); + } catch (CompressorException e) { + assertEquals("Code length table has invalid size (511 > 510)", e.getMessage()); + } + } + + @Test + void testReadCodeLength() throws IOException { + assertEquals(0, createLh5CompressorInputStream(0x00, 0x00).readCodeLength()); // 0000 0000 0000 0000 + assertEquals(1, createLh5CompressorInputStream(0x20, 0x00).readCodeLength()); // 0010 0000 0000 0000 + assertEquals(2, createLh5CompressorInputStream(0x40, 0x00).readCodeLength()); // 0100 0000 0000 0000 + assertEquals(3, createLh5CompressorInputStream(0x60, 0x00).readCodeLength()); // 0110 0000 0000 0000 + assertEquals(4, createLh5CompressorInputStream(0x80, 0x00).readCodeLength()); // 1000 0000 0000 0000 + assertEquals(5, createLh5CompressorInputStream(0xa0, 0x00).readCodeLength()); // 1010 0000 0000 0000 + assertEquals(6, createLh5CompressorInputStream(0xc0, 0x00).readCodeLength()); // 1100 0000 0000 0000 + assertEquals(7, createLh5CompressorInputStream(0xe0, 0x00).readCodeLength()); // 1110 0000 0000 0000 + assertEquals(8, createLh5CompressorInputStream(0xf0, 0x00).readCodeLength()); // 1111 0000 0000 0000 + assertEquals(9, createLh5CompressorInputStream(0xf8, 0x00).readCodeLength()); // 1111 1000 0000 0000 + assertEquals(10, 
createLh5CompressorInputStream(0xfc, 0x00).readCodeLength()); // 1111 1100 0000 0000 + assertEquals(11, createLh5CompressorInputStream(0xfe, 0x00).readCodeLength()); // 1111 1110 0000 0000 + assertEquals(12, createLh5CompressorInputStream(0xff, 0x00).readCodeLength()); // 1111 1111 0000 0000 + assertEquals(13, createLh5CompressorInputStream(0xff, 0x80).readCodeLength()); // 1111 1111 1000 0000 + assertEquals(14, createLh5CompressorInputStream(0xff, 0xc0).readCodeLength()); // 1111 1111 1100 0000 + assertEquals(15, createLh5CompressorInputStream(0xff, 0xe0).readCodeLength()); // 1111 1111 1110 0000 + assertEquals(16, createLh5CompressorInputStream(0xff, 0xf0).readCodeLength()); // 1111 1111 1111 0000 + + try { + assertEquals(17, createLh5CompressorInputStream(0xff, 0xf8).readCodeLength()); // 1111 1111 1111 1000 + fail("Expected CompressorException for code length overflow"); + } catch (CompressorException e) { + assertEquals("Code length overflow", e.getMessage()); + } + } + + private Lh5CompressorInputStream createLh5CompressorInputStream(final int... data) throws IOException { + final byte[] bytes = new byte[data.length]; + for (int i = 0; i < data.length; i++) { + bytes[i] = (byte) data[i]; + } + + return new Lh5CompressorInputStream(new ByteArrayInputStream(bytes)); + } +} diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java new file mode 100644 index 00000000000..7bc4083eca1 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java @@ -0,0 +1,244 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.lha; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.ByteOrder; + +import org.apache.commons.compress.compressors.CompressorException; +import org.apache.commons.compress.utils.BitInputStream; +import org.junit.jupiter.api.Test; + +class BinaryTreeTest { + @Test + void testTree1() throws Exception { + final int[] length = new int[] { 4 }; + // Value: 0 + + // Special case where the single array value is the root node value + final BinaryTree tree = new BinaryTree(length); + + assertEquals(4, tree.read(createBitInputStream())); // Nothing to read, just return the root value + } + + @Test + void testTree2() throws Exception { + final int[] length = new int[] { 1, 1 }; + // Value: 0 1 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(0, tree.read(createBitInputStream(0x00))); // 0xxx xxxx + assertEquals(1, tree.read(createBitInputStream(0x80))); // 1xxx xxxx + } + + @Test + void testTree3() throws Exception { + final int[] length = new int[] { 1, 0, 1 }; + // Value: 0 1 2 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(0, tree.read(createBitInputStream(0x00))); // 0xxx xxxx + assertEquals(2, tree.read(createBitInputStream(0x80))); // 1xxx 
xxxx + } + + @Test + void testTree4() throws Exception { + final int[] length = new int[] { 2, 0, 1, 2 }; + // Value: 0 1 2 3 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(2, tree.read(createBitInputStream(0x00))); // 0xxx xxxx + assertEquals(0, tree.read(createBitInputStream(0x80))); // 10xx xxxx + assertEquals(3, tree.read(createBitInputStream(0xc0))); // 11xx xxxx + } + + @Test + void testTree5() throws Exception { + final int[] length = new int[] { 2, 0, 0, 2, 1 }; + // Value: 0 1 2 3 4 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(4, tree.read(createBitInputStream(0x00))); // 0xxx xxxx + assertEquals(0, tree.read(createBitInputStream(0x80))); // 10xx xxxx + assertEquals(3, tree.read(createBitInputStream(0xc0))); // 11xx xxxx + } + + @Test + void testTree6() throws Exception { + final int[] length = new int[] { 1, 0, 2, 3, 3 }; + // Value: 0 1 2 3 4 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(0, tree.read(createBitInputStream(0x00))); // 0xxx xxxx + assertEquals(2, tree.read(createBitInputStream(0x80))); // 10xx xxxx + assertEquals(3, tree.read(createBitInputStream(0xc0))); // 110x xxxx + assertEquals(4, tree.read(createBitInputStream(0xe0))); // 111x xxxx + } + + @Test + void testTree7() throws Exception { + final int[] length = new int[] { 0, 0, 0, 0, 1, 1 }; + // Value: 0 1 2 3 4 5 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(4, tree.read(createBitInputStream(0x00))); // 0xxx xxxx + assertEquals(5, tree.read(createBitInputStream(0x80))); // 1xxx xxxx + } + + @Test + void testTree8() throws Exception { + final int[] length = new int[] { 4, 2, 3, 0, 5, 5, 1 }; + // Value: 0 1 2 3 4 5 6 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(6, tree.read(createBitInputStream(0x00))); // 0xxx xxxx + assertEquals(1, tree.read(createBitInputStream(0x80))); // 10xx xxxx + assertEquals(2, tree.read(createBitInputStream(0xc0))); // 110x xxxx + assertEquals(0, 
tree.read(createBitInputStream(0xe0))); // 1110 xxxx + assertEquals(4, tree.read(createBitInputStream(0xf0))); // 1111 0xxx + assertEquals(5, tree.read(createBitInputStream(0xf8))); // 1111 1xxx + } + + @Test + void testTree9() throws Exception { + final int[] length = new int[] { 5, 6, 6, 0, 0, 8, 7, 7, 7, 4, 3, 2, 2, 4, 5, 5, 5, 4, 8 }; + // Value: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(11, tree.read(createBitInputStream(0x00))); // 00xx xxxx + assertEquals(12, tree.read(createBitInputStream(0x40))); // 01xx xxxx + assertEquals(10, tree.read(createBitInputStream(0x80))); // 100x xxxx + assertEquals(9, tree.read(createBitInputStream(0xa0))); // 1010 xxxx + assertEquals(13, tree.read(createBitInputStream(0xb0))); // 1011 xxxx + assertEquals(17, tree.read(createBitInputStream(0xc0))); // 1100 xxxx + assertEquals(0, tree.read(createBitInputStream(0xd0))); // 1101 0xxx + assertEquals(14, tree.read(createBitInputStream(0xd8))); // 1101 1xxx + assertEquals(15, tree.read(createBitInputStream(0xe0))); // 1110 0xxx + assertEquals(16, tree.read(createBitInputStream(0xe8))); // 1110 1xxx + assertEquals(1, tree.read(createBitInputStream(0xf0))); // 1111 00xx + assertEquals(2, tree.read(createBitInputStream(0xf4))); // 1111 01xx + assertEquals(6, tree.read(createBitInputStream(0xf8))); // 1111 100x + assertEquals(7, tree.read(createBitInputStream(0xfa))); // 1111 101x + assertEquals(8, tree.read(createBitInputStream(0xfc))); // 1111 110x + assertEquals(5, tree.read(createBitInputStream(0xfe))); // 1111 1110 + assertEquals(18, tree.read(createBitInputStream(0xff))); // 1111 1111 + } + + @Test + void testTree10() throws Exception { + // Maximum length of 510 entries + final int[] length = new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 9, 0, 0, 0, + 0, 13, 8, 0, 0, 0, 0, 8, 9, 9, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 8, 8, 0, 0, 0, 9, 0, 9, 
13, 11, 11, 10, 10, 10, 10, 9, 13, 11, 10, 11, + 10, 10, 10, 13, 11, 10, 9, 13, 13, 10, 0, 13, 0, 10, 0, 13, 0, 0, 0, 7, 8, 8, 7, 6, 8, 8, 8, 7, 10, 9, 7, 7, 7, 7, 8, 11, 7, 6, 7, 7, 9, + 8, 10, 8, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 4, 4, 4, 5, 5, 6, 7, 7, 7, 7, 8, 11, 10, 10, 12, 12, 0, 12, 12, 13, 0, 0, 0, 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }; + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(509, tree.read(createBitInputStream(0x00, 0x00))); // 0xxx xxxx xxxx xxxx + assertEquals(256, tree.read(createBitInputStream(0x80, 0x00))); // 1000 xxxx xxxx xxxx + assertEquals(257, tree.read(createBitInputStream(0x90, 0x00))); // 1001 xxxx xxxx xxxx + assertEquals(258, tree.read(createBitInputStream(0xa0, 0x00))); // 1010 xxxx xxxx xxxx + assertEquals(259, tree.read(createBitInputStream(0xb0, 0x00))); // 1011 0xxx xxxx xxxx + assertEquals(260, tree.read(createBitInputStream(0xb8, 0x00))); // 1011 1xxx xxxx xxxx + assertEquals(101, 
tree.read(createBitInputStream(0xc0, 0x00))); // 1100 00xx xxxx xxxx + assertEquals(115, tree.read(createBitInputStream(0xc4, 0x00))); // 1100 01xx xxxx xxxx + + assertEquals(93, tree.read(createBitInputStream(0xff, 0xe0))); // 1111 1111 1110 0xxx + assertEquals(122, tree.read(createBitInputStream(0xff, 0xe8))); // 1111 1111 1110 1xxx + assertEquals(275, tree.read(createBitInputStream(0xff, 0xf0))); // 1111 1111 1111 0xxx + assertEquals(283, tree.read(createBitInputStream(0xff, 0xf8))); // 1111 1111 1111 1xxx + } + + @Test + void testReadEof() throws Exception { + final int[] length = new int[] { 4, 2, 3, 0, 5, 5, 1 }; + // Value: 0 1 2 3 4 5 6 + + final BinaryTree tree = new BinaryTree(length); + + final BitInputStream in = createBitInputStream(0xfe); // 1111 1110 + + assertEquals(5, tree.read(in)); // 1111 1xxx + assertEquals(2, tree.read(in)); // 110x xxxx + assertEquals(-1, tree.read(in)); // EOF + } + + @Test + void testInvalidBitstream() throws Exception { + final int[] length = new int[] { 4, 2, 3, 0, 5, 0, 1 }; + // Value: 0 1 2 3 4 5 6 + + final BinaryTree tree = new BinaryTree(length); + + assertEquals(6, tree.read(createBitInputStream(0x00))); // 0xxx xxxx + assertEquals(1, tree.read(createBitInputStream(0x80))); // 10xx xxxx + assertEquals(2, tree.read(createBitInputStream(0xc0))); // 110x xxxx + assertEquals(0, tree.read(createBitInputStream(0xe0))); // 1110 xxxx + assertEquals(4, tree.read(createBitInputStream(0xf0))); // 1111 0xxx + + try { + assertEquals(5, tree.read(createBitInputStream(0xf8))); // 1111 1xxx + fail("Expected CompressorException for invalid bitstream"); + } catch (CompressorException e) { + assertEquals("Invalid bitstream. 
The node at index 62 is not defined.", e.getMessage()); + } + } + + @Test + void testCheckMaxDepth() throws Exception { + try { + final int[] length = new int[] { 1, 17 }; + + new BinaryTree(length); + fail("Expected IllegalArgumentException for depth > 16"); + } catch (IllegalArgumentException e) { + assertEquals("Depth must not be negative and not bigger than 16 but is 17", e.getMessage()); + } + } + + private BitInputStream createBitInputStream(final int... data) throws IOException { + final byte[] bytes = new byte[data.length]; + for (int i = 0; i < data.length; i++) { + bytes[i] = (byte) data[i]; + } + + return new BitInputStream(new ByteArrayInputStream(bytes), ByteOrder.BIG_ENDIAN); + } +} diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java new file mode 100644 index 00000000000..3b59834715b --- /dev/null +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.compressors.lha; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.compress.AbstractTest; +import org.apache.commons.compress.archivers.lha.LhaArchiveEntry; +import org.apache.commons.compress.archivers.lha.LhaArchiveInputStream; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.Test; + +class Lh4CompressorInputStreamTest extends AbstractTest { + @Test + void testConfiguration() throws IOException { + try (Lh4CompressorInputStream in = new Lh4CompressorInputStream(new ByteArrayInputStream(new byte[0]))) { + assertEquals(12, in.getDictionaryBits()); + assertEquals(4096, in.getDictionarySize()); + assertEquals(4, in.getDistanceBits()); + assertEquals(14, in.getDistanceCodeSize()); + assertEquals(256, in.getMaxMatchLength()); + assertEquals(510, in.getMaxNumberOfCommands()); + } + } + + @Test + void testDecompress() throws IOException { + // This file was created using LhA 1.38 on Amiga + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-amiga-l0-lh4.lha"))) { + // Check entry + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("lorem-ipsum.txt", entry.getName()); + assertEquals(144060, entry.getSize()); + assertEquals(41583, entry.getCompressedSize()); + assertEquals("-lh4-", entry.getCompressionMethod()); + assertEquals(0x8c8a, entry.getCrcValue()); + + // Decompress entry + assertTrue(archive.canReadEntryData(entry)); + final byte[] data = IOUtils.toByteArray(archive); + + assertEquals(144060, data.length); + assertEquals("\nLorem ipsum", new String(data, 0, 12, StandardCharsets.US_ASCII)); + } + } +} diff --git 
a/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java new file mode 100644 index 00000000000..fc32ebb6a8e --- /dev/null +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.compressors.lha; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.compress.AbstractTest; +import org.apache.commons.compress.archivers.lha.LhaArchiveEntry; +import org.apache.commons.compress.archivers.lha.LhaArchiveInputStream; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.Test; + +class Lh5CompressorInputStreamTest extends AbstractTest { + @Test + void testConfiguration() throws IOException { + try (Lh5CompressorInputStream in = new Lh5CompressorInputStream(new ByteArrayInputStream(new byte[0]))) { + assertEquals(8192, in.getDictionarySize()); + assertEquals(13, in.getDictionaryBits()); + assertEquals(4, in.getDistanceBits()); + assertEquals(14, in.getDistanceCodeSize()); + assertEquals(256, in.getMaxMatchLength()); + assertEquals(510, in.getMaxNumberOfCommands()); + } + } + + @Test + void testDecompress() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh5.lha"))) { + // Check entry + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("lorem-ipsum.txt", entry.getName()); + assertEquals(144060, entry.getSize()); + assertEquals(39999, entry.getCompressedSize()); + assertEquals("-lh5-", entry.getCompressionMethod()); + assertEquals(0x8c8a, entry.getCrcValue()); + + // Decompress entry + assertTrue(archive.canReadEntryData(entry)); + final byte[] data = IOUtils.toByteArray(archive); + + assertEquals(144060, data.length); + assertEquals("\nLorem ipsum", new String(data, 0, 12, StandardCharsets.US_ASCII)); + } + } +} diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java 
b/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java new file mode 100644 index 00000000000..b7e84463322 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.compressors.lha; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.compress.AbstractTest; +import org.apache.commons.compress.archivers.lha.LhaArchiveEntry; +import org.apache.commons.compress.archivers.lha.LhaArchiveInputStream; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.Test; + +class Lh6CompressorInputStreamTest extends AbstractTest { + @Test + void testConfiguration() throws IOException { + try (Lh6CompressorInputStream in = new Lh6CompressorInputStream(new ByteArrayInputStream(new byte[0]))) { + assertEquals(15, in.getDictionaryBits()); + assertEquals(32768, in.getDictionarySize()); + assertEquals(5, in.getDistanceBits()); + assertEquals(16, in.getDistanceCodeSize()); + assertEquals(256, in.getMaxMatchLength()); + assertEquals(510, in.getMaxNumberOfCommands()); + } + } + + @Test + void testDecompress() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh6.lha"))) { + // Check entry + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("lorem-ipsum.txt", entry.getName()); + assertEquals(144060, entry.getSize()); + assertEquals(38037, entry.getCompressedSize()); + assertEquals("-lh6-", entry.getCompressionMethod()); + assertEquals(0x8c8a, entry.getCrcValue()); + + // Decompress entry + assertTrue(archive.canReadEntryData(entry)); + final byte[] data = IOUtils.toByteArray(archive); + + assertEquals(144060, data.length); + assertEquals("\nLorem ipsum", new String(data, 0, 12, StandardCharsets.US_ASCII)); + } + } +} diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java 
b/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java new file mode 100644 index 00000000000..4a1825c563e --- /dev/null +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.compressors.lha; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.compress.AbstractTest; +import org.apache.commons.compress.archivers.lha.LhaArchiveEntry; +import org.apache.commons.compress.archivers.lha.LhaArchiveInputStream; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.Test; + +class Lh7CompressorInputStreamTest extends AbstractTest { + @Test + void testConfiguration() throws IOException { + try (Lh7CompressorInputStream in = new Lh7CompressorInputStream(new ByteArrayInputStream(new byte[0]))) { + assertEquals(16, in.getDictionaryBits()); + assertEquals(65536, in.getDictionarySize()); + assertEquals(5, in.getDistanceBits()); + assertEquals(17, in.getDistanceCodeSize()); + assertEquals(256, in.getMaxMatchLength()); + assertEquals(510, in.getMaxNumberOfCommands()); + } + } + + @Test + void testDecompress() throws IOException { + try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh7.lha"))) { + // Check entry + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("lorem-ipsum.txt", entry.getName()); + assertEquals(144060, entry.getSize()); + assertEquals(37401, entry.getCompressedSize()); + assertEquals("-lh7-", entry.getCompressionMethod()); + assertEquals(0x8c8a, entry.getCrcValue()); + + // Decompress entry + assertTrue(archive.canReadEntryData(entry)); + final byte[] data = IOUtils.toByteArray(archive); + + assertEquals(144060, data.length); + assertEquals("\nLorem ipsum", new String(data, 0, 12, StandardCharsets.US_ASCII)); + } + } +} diff --git a/src/test/java/org/apache/commons/compress/utils/CircularBufferTest.java 
b/src/test/java/org/apache/commons/compress/utils/CircularBufferTest.java index a83939cca8b..7c13ddf6cab 100644 --- a/src/test/java/org/apache/commons/compress/utils/CircularBufferTest.java +++ b/src/test/java/org/apache/commons/compress/utils/CircularBufferTest.java @@ -47,7 +47,7 @@ void testPutAndGet1() { @Test void testPutAndGet2() { - CircularBuffer buffer = new CircularBuffer(8); + final CircularBuffer buffer = new CircularBuffer(8); // Nothing to read assertFalse(buffer.available()); @@ -74,14 +74,14 @@ void testPutAndGet2() { @Test void testPutAndGetWrappingAround() { - CircularBuffer buffer = new CircularBuffer(4); + final CircularBuffer buffer = new CircularBuffer(4); // Nothing to read assertFalse(buffer.available()); assertEquals(-1, buffer.get()); // Write two bytes and read them in a loop making the buffer wrap around several times - for (int i=0;i<8;i++) { + for (int i = 0; i < 8; i++) { buffer.put(i * 2); buffer.put(i * 2 + 1); @@ -95,7 +95,7 @@ void testPutAndGetWrappingAround() { @Test void testPutOverflow() { - CircularBuffer buffer = new CircularBuffer(4); + final CircularBuffer buffer = new CircularBuffer(4); // Write more bytes than the buffer can hold buffer.put(0x01); @@ -148,7 +148,7 @@ void testCopy1() { @Test void testCopy2() { - CircularBuffer buffer = new CircularBuffer(16); + final CircularBuffer buffer = new CircularBuffer(16); // Write some bytes buffer.put(0x01); @@ -171,7 +171,7 @@ void testCopy2() { @Test void testCopy3() { - CircularBuffer buffer = new CircularBuffer(16); + final CircularBuffer buffer = new CircularBuffer(16); // Write some bytes buffer.put(0x01); @@ -194,7 +194,7 @@ void testCopy3() { @Test void testCopy4() { - CircularBuffer buffer = new CircularBuffer(6); + final CircularBuffer buffer = new CircularBuffer(6); // Write some bytes buffer.put(0x01); @@ -230,7 +230,7 @@ void testCopy4() { @Test void testCopyRunLengthEncoding1() { - CircularBuffer buffer = new CircularBuffer(16); + final CircularBuffer buffer = new 
CircularBuffer(16); // Write two bytes buffer.put(0x01); @@ -256,7 +256,7 @@ void testCopyRunLengthEncoding1() { @Test void testCopyDistanceInvalid() { - CircularBuffer buffer = new CircularBuffer(4); + final CircularBuffer buffer = new CircularBuffer(4); // Write some bytes buffer.put(0x01); @@ -272,7 +272,7 @@ void testCopyDistanceInvalid() { @Test void testCopyDistanceExceedingBufferSize() { - CircularBuffer buffer = new CircularBuffer(4); + final CircularBuffer buffer = new CircularBuffer(4); // Write some bytes buffer.put(0x01); @@ -290,7 +290,7 @@ void testCopyDistanceExceedingBufferSize() { @Test void testCopyCausingBufferOverflow() { - CircularBuffer buffer = new CircularBuffer(4); + final CircularBuffer buffer = new CircularBuffer(4); // Write some bytes buffer.put(0x01); diff --git a/src/test/resources/bla.lha b/src/test/resources/bla.lha new file mode 100644 index 0000000000000000000000000000000000000000..2add0854a6f25c2c8b1f0b2c027da47dbb3d196c GIT binary patch literal 55 zcmdP2*UiZ=)#YXY0u}~_-fZ4z1qSw_oK(G%ijo$IP=?!Ob264Req|J4U|`i?@%|(B J)||na0RU~14xIo1 literal 0 HcmV?d00001 diff --git a/src/test/resources/test-amiga-l0-lh4.lha b/src/test/resources/test-amiga-l0-lh4.lha new file mode 100644 index 0000000000000000000000000000000000000000..2231d7d1d4f7174f19c2e26ae16592168afb90cc GIT binary patch literal 41623 zcmV(zK<2+CtSxM4G%atU006u)0sy3H4FLcE4{UF8Wo<2KaC3ESE_8Twii{W#gOakz zEp69f%z$zuqtPx250pS4AAAxJCLg<*e%-b+Gc?{^?44@4&Q(`#_kHq^NH`<{4nPhG z$N|GOpndndSkhm+`FPVV+_U%dw{BfLb@8V!IqS;(vew+W`0wW~dgZR#a=%Rd^YhO> z{=Bm5p1*Ei(`@|zeX`}#KVJFy^Pg^B{ygoLnR45%x@XTXKW<;cZ#?qleR*`hzJ8l| z<<~DCZeOn5df&Gz?ayBtTbEq@cI$p!ymjlJo?30Ljl#3PJ)8Smmu&fVzn?ed_`f{c z@VCP5|9;%N`0vZ#+WKX#mv0&B+%L;|Y`gNyZd&EnuFb%H_uEGLQSbKCz4>O}Uu_%u z{9jDl|7q>xzc=}x=dNwV{|&|m%*)p=I^DnijnHphqwUM|(f>BF&b>O{zf9X0el%=9 zn{nPv@^fwp+uvsBck7?`*}Tt2^WSb?$Gh+m(HrA>dk@2|efj6pZu;`?mov$mqOY;` z<@@|U<~o#it5+tx=Lh?C#wc!V+a3DI<`K3dm$yFj%TE=uyx-S7b$_sOZe2J4UbAh!B*+wJ?mckcam@6Iwyb^UVFqjI(CUwOCg_|IGA%dd}58b75T0y^9O 
zr%L15TaX1@{Aur&jt>ulvwWVv8TY=Nm>}x@5xAH4Zi~yuO#E`&UQPEHS3p$1K6`fa ze(%kk8%KEmEW702mUxQ!-9Zy?3UbD7GKWBHpx=YexxOts@N3i72&ee**KK|7N%u_q zW#3l89d7ttvW=VOd}X5xF4?!0U@C$j_L$nt2=naS+opLQw`%CqvU>r%{0(OT`1-+eRfnDepfe%!yGwfur#1fIlS=h6ukAKP+bDaZ`!=&zs_6kDuhefcqF4Ewy}< z7;T}hU=0m(+Td&?AZ{l7eS2ooS#_aiLX_soL^txsH;#sOi+|kv=Y9n{8;1V|Zmv^C zR*zSWIgH5#-W0%Oc;MB@`{my6(F$$1qj1IBw~n3%iIrF)G;9~pBr>g#`1uDLIL@3- z-A}b5LtzF2&FR=)Dt2-l-0$95c!R}*&6z@1~-hkiIaqZg|U*v*F@ zvLn|8=eC|*6aV=IGQmUM?y zoZ?(XFNkTJ_{Y(*e4!Q>ft0!8HhqA6$>ZTj$_!Gb1EU$o9hkrg{)`;h`ZdDHmUM+D z?3!V^h!=;DEs-0Jc!Qf+>|`~z5ODdv>wL!#(4uY%h0fOgdS5RsovIlWr!Ay&{4kPa zdS+&cs#otArH<5b_zfwzp#rg*aM^MtvUj}sD?rUQE!_5H(4@rpznIA_>=*BWpjt!c z*4V(>_=>$j4#ig=F42!~>NLc^>DPfklRhv$`GykMjE8XC&zAL}+%v)00y8~FzoBFyP@Q{^R`msB;6Jb$SOengZ@ ziIjitIRhlGRwSjxL_Tn$U&LG$R~)^KhXq!eUT)OwiVE%a0;0ntd3}^%RGjoh?L-iK zsep>asW0E5$J&d(v6YyXt(1AxykvBePvFZ`fN>$8SaC90aaQX2!HRsnRwp0xrF0dK ziQzgE7}kIG7ya^mBrm~i7FYB0Gm~8jyMv>610=k}tqe9Vs`69NjXhulNxlgNDEege zDINb~r*zl6@#Jxc0DLa1y7u=tp-W29Gwq))`pn286=~z*cM8T^*$`fA4!82dSz*}P ztS2A8@C$EUNe1Ix2FUE9Y{vUg|7FxL?15v}odjgDz?O*q4yR=!GN9wQbP{> zsWlF=hCar?OR2h67x7Zpl9U#RqJ~o=Wg>U{hoG7z0y$ILh%;vOXjh1q23{m?=c8Cq zo-q*_QhO$Yl)Y!Q6WP3In9XD2hy)>Dzi*Ib z002=qveN}#emgR&_$zGxj*>K=z>&hT$EX=Q;I1Ml@K9%zv^+GgdDVD!FdWlu^Q)@> z^JggxXZ|jAaEAm;hWs^{(USoEVC1_Hs?y3<#G_)bo$v0P>PHM0OfGH1&%XZMY$Wo2P?kJI&8}pvD9sLiHA57H zWZs3GCd$ZBmpI+lOuW@t6+wvR1X<)LBcaRqH$DTkI#FsXU~`sAcVMJCQvX(dy%SQX zcx1!39W$VL-nuH<^g&2sa0irOc#5EdZ)`;H(13hR+xc<8C1bBaEYG>$UrQ@U^61Pi z6*%i8ln<9Op~C|H4CD+$Sr7&wub8Bid%rS{qHMqKPk@M~!yfu)+c3>g66jN&$aST$ z0l=`;N4|Au$&i_{L#(t8n9=9kZLiI`4nvoVOAeG?D*DuRY;n}l!#qb(^O&gB<7les z;)pv4j)_Nq`ZlYe$8C2>P^zX(cR8e_{AB$mM7;J#Fr-LZ67O_2JAJ$@u(2X5;&fZu z{TSeF95@p2vGuT1K&zRO&)dWo`T?Y(3~IQ5tw^ z3{nxe97JWtmzEMfmG90` zthsy(IjFpaJ5Y%!T5P4n3~|QYsE?OBA27 z#V!Zqn<5EfwdxMFVDZ}OEl78u5%CE#sx&nbfm)JbOkyN0oaa#!B~kls8a`Mvp4vKb zwl+yL{$n2_6Y#u1*5$t!D;~a{ykbs%G+_L@n;!^Mxj>smRRVUz(U${;0j6cWiAwT5ezLSJ#NPb2;7?IE;zgv3Ux+CSa zrGtvtqpP2JS?byH#tM`K^p2Pb>Gg=Ec@5$QGm2`pJ**iTB6K)@8d(tdd 
zJK~iZ9V~D-Q34hiY=*dy5|9RRE9+#&8^soi(f%4^|oa2TfEZ^aRoXYPEMq?Cl z&hJqhf%qWSA+VnX$Ef0M+%}ZSlgCW_4rWk#cT|nJr5+-<;O-1!!bv7)kNEGpN|4%Z z@h%~sP5yPJ2;>Ty*GYQq77`X`lwvYm-e9q2gI4{f5@}LxDV}lMA7Cd$iZ=)-&Y*?@ zd(|K0KF@?v3+Qk5Jlyyj$-N^oyB58c~DHk$I;29rJDNh_&1j z-Q;f{Qhyi$lY3cnao4G{X^^Eqo+SQiEs}Wh?#b~`5Wyk)kta6*t}f+Oxoq%j#O$!7 zaE($}9xPhUf;wo*RAYF7!3$DRW`qp#dHi3q&H9phV4OR#?2@5KYd0LLQ1JA~QKI(} zJO5Mq7L33Wt24qlZqT+rJaPmVs^$l;38Kzed0e_tleSXK(M3V(yV!Z$Ng&OG2@@;s zSBUP{NnAS;u_RB~?n-aepvL-TOm~jRWWx7~4j)8;l4T+ju>$@SH~$T*Mu&(~nlAAc zSa%f$CSF$+pDV-v5L}6L?()yzB8t-c&y?&_n*bao8O`tPjKo;7m-Jp}iaWsEN5&uX zeFKxG&u6e>o_ur&a$!_gs6&R2m1_6#t9-2EM-Ed)X=x=4bSsupqnzF1Dg70_{Y7l& z!|uQ&M_}#!Zz<&%Er|71XBP^8mfzLo`MM4YgAR9PTxrB@S!p1ZJCjXa@Vf)3t@JG$ zqecV&VNhz56yP3a*3khE_s#9QOC3aD4yjpgB@Z|D!h9Dnh8x5+Ogq`9?eqY`9skvA zj}LJnN}lYP&bmnwOIU>YFZ-uLj+^)^m^l%net2Qy9o_JpBtz4|$EVwu^w~hRb9%(l zqS<(#HYcxnUcy&l$reneD&bkO1}vWxhX)4E`MblDn`AbQ*extR z#&1wrX)Ib5p+&boma7M^wC*boXV`c~6ifJay{)* z|286e%t@AXFRLfM5U-mSAbz79@O5{ZIvE0`-Q2aH>BWf+F+fz zY2C?RX4)oGJB(&{gSOw4`3m?gKuX+3c25i=8~9;r`LI)7l4M(!+o5_TF+q3IrT(L+ zhBRA+_zGLcs$n2D!G?V2<@}E}XP%jcR<%ru^2}J;HUh>OQeKk)eyDRVjTaPU9uXpA1sw8YnNi`Fp=sMheZS z1?9T3z4E`lqN@$$wP*o&=bwfE43|yT*xCp4Y}(%m{0n1^y&78G{CN@25;E`4FMU|M zdp=UWPS?p=j^$PU{GzQXi&RC*0J3+$*KNjW2qBR$Y{!&&buD|S$F>Fs? z*Gv3o(N$Q}n6f^3M#Im(>iG=I8@!-z<#wV69U^{KtI!sJ#`%#JDKP1RTmA}$^J$t< z-X-4YNc(1E9r$H>ZdeF!Z0jdTbcU)pYBT8#++{J=OhWzi&$e$$O7p-tt5OLsr+$S{ z=D^ph19#&&f8+kfQ~!~F}cF|SPS#aPUop@Rv=M_P(-xBvZZ*LAYxgDtC$j1H!K9Q3%Ue!TS{$%)|hyI;z%0H|# zu;Fzo2d#d1lWbsr-v1exB9#?4<41RxcomStzdPlykt759(f3&Vzjhcvha)QpYobP z%ExnskBAt}1m0CbLAOO{#!C`at7=3T9ld2rh|9zr{-2Uf@&OMc2Y)YqFSfPs*V`g) z#}$ILLBk8P+cIA#MVms7?T(Ni;abKp_~MYOLyLkDZ=O8PIRNV|tv_+UG(zBx>KF46 z72}*#Zsf1(ivHaI^+v&Q_pWyEb+duELBYvz3M#8pdR7#Rze6`J&nVFaUbf_|X_c@! z@6BBs>R(I8#xYSwE3xCIWv?K>?@u^L(&pDpmkJ^WJtwIJSh<6VB^`tM;%~asa~H0v z&k4c)nqH-ro@f__>n2r$xJD*e{5ZUo$?j%E`ojg{YOx04HrE7EwNhKgqU{VSH#si? 
znkC*vQDw8dclDM5vLDq+hH@OW&G}}(Iiz#LIPckrDEl=^GTYZUD(0O>Y7di8Qddt1 zDk6MlrB$lu5(aTmRZzsM$ABb>E3ol6g&UR`MJkEBWp|E?-?cCX2kJq=0(kc(m{x+M zFoh)n{I-8(QMCe8Yu4NDGBCU)suDskib2?>2NS;VM+^|cOQQyVM+Oq@;B!LDMf+${MMI)e z->iKxu2)O8?4I;cbJhfFdOc->crnUqjIwQzd$<(DSEXG^9R=!ZhDA)mJVpJ)-LSUH zv(Q%{s)%H3(J4ft5%VFnAI!}c;}`h_Ru}2hL(fV; zrD|wPa)?T8tS6x%)_QgrXZL??_T~LHA@P1);#}a682W0*h^q?&cs4!%ZePeVsiO&E z`bNF{X8V_|N|K7Ik0gWXW+)UQZ7-Vd2N9DD42*b6ZV2lX64Lc{6q9lI->ar)rNJfQ4Kj1m<@93Fz~s-=yC));qrigiJo zCi)YjagU1L#!pIeC>K*P5^{Z_{aGQ9FU!gg-vMj$k@arVnnkLov^l(1s382}9q_#x zKgL=w;w6P8dWo4ARRfFEEp88w3q<`!HX9QQDshY;<}gLVq!v>pvt*^gKe01xZSte2 zYrwo=3rg@~y9wU;!19P;3ZZ02im}~)5`Xu1fGzhCld%ManE6;6kIy~e;aV_OthGDOVf<0%k4$$tu#dv~ATaImarRxAd zn~rf&n)lN_!`#i7eyn)`{(XdUNOs7C{DnwtD0$pnqCwDA2B0qL7JOqS5|7z6Ga8~LtaQTS(w7IqgB>d#ca1Haj=(iA&g(WO+ zEnWLZKv(K>X6=1G)yVGg6@gc01#jtsB2RHunPPfHseta0c_Ot?VD~H)#Koe=6{#g1 z0_&%T2yZlqq()Y);}=>uwBDloEVl6MY+y=Ia~V!-|$Nmu1Iux2Am@%)2MJ6Pgo7MGwXrYDvZ; zLbD0A>CcF%LJiHwiyYBpK(}a9X}!_^|4k+A?+bCz9c?QWvoV4ELng7_|7QPOT0H9v zBedJJ9NeT{;U zB-@$tO2$2zY7A`}33T9%7;x!(8H`1csN0C5uA60IM@rS#;pL_V=Fy5CYGi-y>Ln}5 z?LF3IqeR^P;j8Vtj64Sp-w9n11+35(ML zXiBB?-sc7ku}2B2sSVWNNiqg1EQ{~s&e>^|1_%n%489O`^lBJmAMa5@{0ByCg5HiF39+# zLnVbp#spoty*I`Bi^fKRWM|@{Fk4~?0v9hTsIQHB+TgjcU|O;g$ypV9isfCdI(G&5G-?!hSpNY^AaxNsN};g>&0Lq|^fLvdk=t zjc7xBRq!8jIaQd9zULfo7&vLELMuTq^8DwvJB4tE->PD<_vhyCVR!MXuRE84w9dno zRI&qKj_@B3#T)+;KBM_+pnziSwX&@l7?V>vZ*#fh*+%N5nNmMZlKd>bs2g&=8TXVt zb5Q{lSRnHL*`Wh$C0jPQKBAhqgJMe6IB%nPsSU05Dnkf~T;aEA)uEr~Z!A&sCAi10Ea6Hh19H3O7mwa;F?r59 zHnC>~O0n}KP%Af`v={HL>oSD|%yGt#)gZS^l&V%f>T`T6Oi7~9l_W1xYZ(_HH4q&X zYA5_=-tD5ew$r-#B5>6f-nGRjz$M&?zn1#duixs|iX*YXtH^{uehV>wjYPvY5RP)# zkW>J4gGUuOagZ)`&||=a&sv+}uTd3PU6*lbfC>tRuN+QU-^uZcni_z$owVg91i|rX z06HQ`?UD57wl+jRM8|&4js}pCF5{x@3YPlr#Tn31dqLkC%Owk28oU$DaqdCl$896U>y5de)Gf?W#O zB2@(LyK@$fPBKdzGJBPw2kK^};KsatBunq<)sf$o{od!CwPjk>kk`jN{5Jw(;NEqI zt=z=72ycYpJ+*e;jJKWqc^&2d8x)Ma^=1tDG@4;A&EO~VR@2c zx`)aRAlG9DmesKeFgfB}CYzGuP{iBW=m8^e7Vxe+D?2|2G$mPRBWDu=M#h^lQxdK@ 
zPa4BK@doxONZ=DIuqr2PV9_#5v=rH4n^hr9-XH74ARZ2>Lfy@9kc2TL*KZ8@x(MAV zSxOg$F{Lm=lGX~k2NW9YGw{?QV`iA&bUeUKj^dU3RZ&tX(qiHP3lDv7M)lM@%!PL{ z;kk^DVh&gNap#9rzVU@mZ=<-V=rE1z=5M^}QDCo#e$}lev&uGElDz3k-rx7 z+fxdLRIL!;82t!=Ao4_9^TbT_XDXB*p?5w>I9n1DvWerMN7F?t_;;afkH3vQGoP2k zgo6OC_nBGYCp^*C9x} zAtj7^#795CI(1Kh{9;KW+$ej!Yv-tK7hHePJ9fs>H~FGUXDt zLbYov6kUYxpVTH&ngiRIG#wALybbl=uBgu80FxRme5*1J(M3w1I`Ka$<}0fGQUgL2 zOCL7PkVo^4ef&HkbA+><6B5uK63w#Xk!gxhr=3K4-bMK11 zO0TM|soz9usVJh!&g5=-p2Y?Zu-ovKhYZKhsgfUh-@vJTR-ftf4Pv$-yBfTXfU(qf zUqt!VQ4bCeDrFTCa9$n{7Bnkq9kYyfuVGhKokscjurlrcP7eD@A^NK0-w;TWjghV^ z#?&E8)?R&Fz&Z}|v9>Fc(%`Z!lC&ZhA{N2Mn7EYQhtHDsc&h_w*yl`Fp1N2R#wd0> zUXjI$ahh*RKHH6u`JvM$gK0I6H?g(gAIyOmXN4qE`strBCkP5L2mwtU!ZPg&QGD?v zv*bQZG!aJqz&PzKosPd;=zK8;5Xto$1-lt#@0T{9znn?LQYCI=aL0$7*K7f$c=nu6 zvuV$3`ZnJPU)7!_>jQbgTAsXJ;fB#oDjzbp(t%QvX}f}V!*k(|?Gn!+{+2YnOW=&# zgJ|WX#D-CE7U2hCDB=EwIwS7`Ga{M~&*_UveWW}$l@4Df@@_U+?MK(u`)~DEt#WS< zEGCZ-1BY}xf&g3f$(6Z!F_f(+MC)3^UiO9V;-;eNcv7Ar))ZPI#T@R~k-h_De;X?% zrc|sR2PL@~pOtr7z%VW1k@XI2CHcB^EUKqW!A@Xmx$q&5WyB;*ggiM!(?PrzLpYQ~d4O zO{m~<7JwgeddDG3e!2tpkWfASniBfyczrqZ<_^c=QR$tGT2pw(Of(3Xts1D9QS|H^ zZyD&S^4o*&Z!ajT=>oAqLzHsY2~!H*_d+BhI3^Kb@NA@`@45GcZDsWvv@#OyXVZt_ z3cMQzXKIm)7{~P6M^UDh^X8Y9bD1!AI5BlhMJZUi*&5*t*lO>QD8L3<4S3C+pW(J- zb!WvGVhX}g&))&d880E+BiNZO3Jt`);U=%qiX#-B{;;1g^Kfd^3yUj5w@4LlA-Z8i z2IYt!r+(5@EW-9A6i}t&Zm_Lh>pH)M8DGO8ptKAlf$s%%({jNOl97iVDd0ynxTScQ zP)_&7a;#Eg9=rR7l3u{HPI#M@Lgm2Y2cS02zOq?a_Cr=nMtq}mXf%o@H%kArGJqa1 z5RuaiPdLfbct{}xN{Bn-GcfeJGus1Sj&(NC#8%}PODj|?H9SV<)tpkxe(oXexHkIZ zWbIBI3#irhhj@WxHY$-sI%D6uWy3s3aDY4QFbSH-x3Hw8kZRJiyM%cuxBhzj3_ z-kPPAi1m75ajhMCmnL(z*s+qPmh7_SY0**4V)k z|8aG{_9t(_$09J`#aH8Q&VI5{NyyUHh1@Kl`PV;JFB8Q^Wd~tC>&Y||hZv3u2;pym zxj`~aclp~#-Cq@b5t&SXjxaP*M)Flwc|evCm#Kp`{+xKjS;qL)5rYDp`X&1p%xr^r*L{c;%G4q64pcJ-4WUr%4Xv-B?^kx6{e`T zzoIxHi>s(Jb>!jwBD?s*QMfn(pBFwDx4=9mPlGI3e#~vDi;AQI=ejZB{#P;V(H5A6N-| z81Cc6!&f3r5$GRIt;U>6 z!ZCsLaLPxbztWZpHe3=+2$$s7c4RXvrlg}*@&4KTU$gma)4$F9dB!8lYkbJkWJ&iC 
z6JUUS$Usyisr9B>V|^*ukf0BukOUcaNqalI6s0siNce4PQ1W`v$MJ;&->UQ!1E&nW zEU?|Qu>T#sdprEW5x+x`@}7_0pS6MtXL%Y84(8xZz6OCEe>k}r#z%f&eAM<2Ylai4 zUmoFB)s}Td`4d;)6r7u((FC~V|kZRL}_R%U>rcrHS=DO;a>@5|Rq{AbZrKr&?a z(AUw<+@pSL$y2@Jc>>02za|^%AdXEU6ybHqY^0PxQYJ-uysPc#+SrA78n zPsDxem82kG2%%n)`wIsT04RUU|MWdc^)Ow?CZ(TCK@>Y&hYO(yy+KjJ$wrv$;pS~pGsF>WFitj5KLJzN0zhu>X(&3mcb|4KE&@_Wy5DCEOeYw%%w(k6M?1n-qO9!YEr=8(S|xn? zP&`V{9cjMP6{T$x*}DKji8fvZr`ZeMPAQ< z7TAN9lF1}-*E|fikBw>OBEYx}0QUkx8_e^yb(xX{-QFvzoAK5m-QFYrn#|HsENj4& z+?h}a1Qvn!-tA*aW0iM5W-~Oe;^V`BYluOKy-zIc67su?Xgajforx0aKaM@XNUW1l zmwh~KVC+Z^xSkIl^Zeq%ZD&91QSLuDzb!vB8n_sf_j!@Ako8WXy=H#0P=U>8iL$wh zKEi)m$sr0=)EK~_5P!yIuzHMmJUGryeC;rEkp(H2nbk*lSfR?QclveN z`v$}FPCfZQ`dCt7p#%mRCcff*jE?6CG^966+M_E@5QR&FqxX-`)^eqJj|X5Bv$wVw z*B)HZbxXi2D_G?h)heo2p_!w5^EA{W8`s}qn9{VXUXh1i3~T>Z#r4z3{!ll=0EVe? z-@cGnMMjPOMjOss8OD=>Zhoj2$iA;xAI;o6^h=dvj;Uf{2hX2>PCNbfZnbAJKFGL~ z$V$@SXo;Nc<^|E!?=+nzXQB%@wN{%7mpcO)M+^~gBdsax2;3=?aXTi*Iw~3}#WL@E zy7^@OsU30+=&sTG$nXawP5^a7!x?-(5X^pr81?R<8lJ5%dw3^54W{>hiJvJ zBA}y%M5KKP*0G|=#xB+8%^klkwf(JZ*tK(;ySI*yt`Q>s7s9Aa-(I$TOi9yfK@~FO z^0^b%qE-`+PSx=!%Zdh-1&y4cq*n9nCzn8;L~BSJQQ6=G^Q$h3a>{Pa!4 zEpM1!iy3a&ku9%7qwcPSwIU;4k@@_#`}db1?;1zEdQX0v`@m|4*?=P))G1VPxcE{~ zqe5}}?=so#9xDU!$1m&m?+-8bojtc-e#;Z%_xKUjhpaB$e zi-Fo0N&+!;+`SH5KK?l+fQef|7Z0mjxlx_F9?SzpVXxve-P8~O!%8Px z_6SV`_bOBz+wdI+M}!>5m!}4S6YgyvX_HLdO%?Mwre_4g&TGT>))T_-Z|#O*?$nFd zfA|y~X84^#u;;zADwyEhd`-q86zBq0QKi^9M`FIT*wnh@HqUBEu;HF5HsCg)Y8~3v zx=wZip) zO{E!ZuAF=b_2%~rXj=D(O#^;rR9VHjdqcCu=EhT|6o!vm0NBfA$^d<;3kKn>G3G4t z!Cc46xO|H2S_U!*y@p~^88_E_We4`0^rm5q%EkCZ2~JD_`6L=MQo(R@{^obMK)b}c z3E2IvLSXIX%=1hGKbgmy=rlR$C|`NilT63QovOWO1oC zBEJ(>FrM<^yEZdMqJ}6FVy>&`q%b?XRiSxh%EzAkSPf)L66IKeWW8`Y;C7;RFmkMl z>jtEp;IR1b*CA8dh<1NPMC75?#P2?a;U?wwtkWC*s_?$`Yk5xXFQARAZH&?GZM=3! z#sTK^Z01J#@Lrz(ycZbz7g@I^$yL5#ULI-fh}M6~5*iSFiYCuPvgDtqLsfbCvfv>? 
z!hO+#sly5l$EVKlbEL?@dFH*Z&m`86qtA^C=1(D6JEG+NsVIlBu?#nT($Y}IM#nzX z#IkdKTQ7y&)Fs*Jb)7jrY5BKa(*Vys`cI&wS2;+Un{MP?Ycn%svO3$uH?0J(7W#r) zf*Yxk&91ooT-9s#%yG$P|-wdA?2%NaQsiaEk>;?Q(^_QgS|Agr9EN`hi^>wkhHp?*Z-<2daQM``EBYJJ>cEYI?06RO&?W$~QV;vh!Ulg!8*y(7mx3fc>UWc=rG? z-~MpZAa59Bq%vTpD0H<%}))XtcY1?ES!MMg$Izovd zy9#L?@1*XTSls?TMH(>TQpOJ&kvvo{#|?0RF$#V8IwYAWejn6z3r%(@YQMgU&1Ji*j zVjXL$hy$NUo?Bm0I|JLo=UTV=vIV6$ab|q#Qbc)?XTP3VK7T>*2C%zU%p*E6G^m=( zsj?vBS#}iuOYmw$+gICv-|V7Deanx9H0*DXW@?B0_q4GAaR>Z7NU1KqNB3g4DGch3 z83FAlwHm9p`nzDIbLx!^DcxlWP12;qCEaT5CLcTDWY8kQ?ZS1YGkI}oCQE?YKm?D3 z2(5Ha_@;4rm_T_xzF%m{-)W1U)9hj;efsQ>{$aKHyCT$n>IS;V^G|zT$pm?eKjL2A zo;x7bno*@lnnR7h6~^RL$sD>G@^n02sMa7#TvT+Dyf1vxZQjVUwCd&K6Di>*c|gnm z;oUG(Jwd>^t-)a@CPpt_?S0x!eIsOQ3+YsuUypuCE00UzxuFbvXwd)SW$9br{|)g! zD7Z&}Sz4fs{r#Ng%S(vQhsJGqC+?9YS;46AiyvB^>4D=-t}Tt4kN#00hRR9gp(g!o zNJ??}$C#LONPM^kEM&H+hvcF5M(t!kB@hnAUGAts?J-yWN*5{!7Se~XY8F*dB31zv z8A80?ExZ%?50pgKt4{z-l>xS95)}#_Rn?3|7E#;XbTcHr_gsuxIg&_o3naHDz!rmv z$e>-Zw-GF+_SVqzrNV|iDQn$Jd`z4L9I(6NhKn<2j1>EF%_~#$s8^@8PdL*QP)OIG zZ#WuIHT}M%ejp}Ru)~J-Wh$!^$Dq^->-U$ay-~}1)vwo|H2beeF`aCG;Sop8l;(`y z6Up$q@H2%VaXAjydh2EY1AMVP4~O+qy|m3pw@s}i3Xy?JvLxno<{G0x$#c`&reT@Y zTys!ipN@YxSDH`x0IVo-%GHR-5QperZ6j%<+v(*?FA^)al<}Nm>ru@Yi#R4qX|KYq zgU6NnUqhB2@iU3xDo0-%`ibY*w=HwSU@F9|GCFbl#%I_B{=e-HI4C1?i77@X&_78- zMLDM~rLZ0}5M7uLo^oJoIE7bSsQqcyUhJUF#ecZFmjI$2GHwMl-YEJy*0vGt6K5sg z=dPom1ykR{>Rj5g)~0Q0wg2D2K$JL{r+of31j9#$x-zM$Uuf7j|Fq$96Hhf#{AN{p zhMLm{s@CLFU6{2AGP=wJl;>@hShn^?QJJ#ULJzcJXhMsZq&#L~)y*?%cnK1wZc}N37@Z70JfIff%8fPluez@pYVxO$7V!yM{DibP2 zalmdq!Syun-=~-JDUuMQ_(nZihT7D`CcqnxRd<3Wk%_0(YRs>R-@zl7TpLC+1HgDl z&$S4P%-CGb9*Q_AxNXJSZ3kbP%8k%G#bc<|GV|o75Pf8mG^}9BDaqg}g>eX=xD7(M z?|49Y%Al~h$Y1-)Zc)e2wWq2-wdU3LslxDrKm&I?Lojs<3cJCDijmKnN0MFAEa5c_ zoJ$(NPP~10+I7cgQ+rfH&qXFGmINjQ<&21LJ#O^pOk*?}HJ>UQvV>zu$}n1hQwVP) z)6aRh`BY>HbLrgQu9E|Z3GIW((k(#ES51cNC0WlHDWY;hugi_MqebMiuBZH3S$?!{ zKh0c!^7if?ucv1A0BoP+o^d(;dCTpIL#orWs&Fe@qp%?RU%uDe`_kDOk)AC|_XBG& 
zcyq=@t0turdZAzK-M}i*B~N-gH`*}awtTN-GrxYjD5-Yt=IL)HT#KZWd)w)&tH zuvIed+r(^LSjB~HpKO-zaWZ;$=d;h@Ca^h{>-D`}+2Byra%+0aw|xB1)F zb@e_AiaJVJz%PSkB6v^^2?ncwYg}Mqn37x-wHaIr9~6Fbs*l2pE55-XibtJhs6SaQ z#XcC&!5H%qUA{Lvt(0tO0JGX-g@(ul&?-3RD5g(3F=|?q+rx6=P|a z{b zD(!f@)uAO9N$7&YkS!qX=DvS_e(}=xl#>I8@AidO@A0q(v-IB3c0@hozOOzGqv|A zOs7r}`Z_k_N55-q(}ETT&$Ycb5EAUoa^x2uk;6A1@$d2b?#eFX;RTAf(@dq6Sexph z2_7O))+1bcHk=Bh`sNckLWr<}X2v5mYeR3_ZX&7l$N$sqw`0RtX1Oq}&)q8?kDpfM zbip{b6H2o>hZ#q7TE(bty|Ybq-fQJxuZId(@y4TY^pA1pJ54+I!-iM}B!^C2Dvp+s zOTV2y%^UC!-*Y?~4}RAvkll2=Ol$g@8UoZtPC4V!K+vy~C#I9H7xGAlh2Yh641CX9 z78SFpCSJd4s=Qg{Bd`e3$U90JSSD{%p)FeBtOohngTzwI zuey5;VHoiIc1izT*4r2sg zfEMguw~Z%J{n{Wdg)G6u=1O2!h>oP5B~hNX^oj}8HTG6SMfV8tA3w|BI=r;Q1Xo!uBn=k7udKq(W(}A*POkKI+Wuv{4P<; zzl0nrZczcc2L8%MokeLL(9uP0zdZ@$=JE&0=~HUC{i;Gyj&gz{zOSu$ifN%t7E*@4 zWo*RR4xpu6Ar7&fatPhkTQ|~~s0$owg1yfusDwxKX+dQ)sq$ic{n6^@$*A4XyfZp- zUcaD9s-`@DuYNMy685V|gci6@5VUE5Ktnd38CbT{bW>kTQX}A#aJB5vPPkNto{uU` zlZ0ACmWZ6A-N(44*+Yyw#02AM6F<`$o}rS_Pp5R8yaZ*gkqxIv$<*@=`UV)Ja=1B$ zRW(u)mkZ4aImw(!7jzM4)tBR%63IJ6V%zWIi1@UdVTMjYERK;)It#v4WAAz9rII=KJj|T9Vkmyk;QyI_mR=3=C^56nFTO3cy4ltqcFMGYJ%98zpOpggH!fZJSHl1jEgC@ zZxIrh4h?Q~0_U$U3p?Ej8M44}Dq}uEgnJIWeRtctsCthC_Br*kbi7N&yt#)7D1+EXCHyk*C#NA9kLnA;?RU2u*_S(umb%Y6T@-N>~GQ+$v&>uihWphL7h`OJkuNT z>E66z`;QJ9#|G65DvRdv^Pkc;A+r)ajZyz|MP{z^;DL1;Nkbyn+N9?q$bge4!Aax6 z|CpNDMkBP`voXlN%Uf%p2`!kG|EbNUW%R{to8#&jSGxkpS7-!)hiOMCMU`h&7y!iM z&F0Ge?!@wa=s^y|7IEo-Y+p#WwZ4wH{RDqK{;-E#q*@@3EC!)%R9 zp>)(_%4tS;GSh@SU@_?q%Ic*Z;tuyspd_kEb#X6UrHL_q5p&)A@ah7Kx^X(tMhn0i z78qPv_?g3=<0)sV0v@0loc^3i(d2;1f*f||BRMR!RKR+597i*_245Rp3qtIO@GJ0$ z+T?NSj0f9P>)b|HSD`m@&YqzhR;}5L+uT!1NlOYu)-`sVmeIgVeZSHA{*xMiO`yPa z?@(L`%{CS07I~4L?^3%0poX8*GYASK>J?*FaO;B0VydaUygGgV3IAGD)Yx(-z+p9c zsBY?K=%8v>7mm|qPe~HFN)%@1pgZ*IAjYU_Q_{z9`r<&)3Li^Kc|!=8ntRNE$tPrR zZ{~r0T4O1g`^5Z6^nM^--oz+9IO7S_KY6V@%G7d5hJI@5(gR$ItXg4k?5~+9SQ(!q zT(GDaim0wY4o#b{uIG@GlEi&A?8#&zNsyhw$@s%(xeR;dBG{#+Go%2Nk_!&}0e4m{ zxAxWmpp{C!)8E!I*SHk3;va=F@&4U>^(X6)bm|RKZGi?toV=zch+{pr|M_PL8NsDJ 
ze-MQsqIrV9Q}kGI4_>Q9T0)5)RP~8SPVs&I;(b1llf5eImAzXdYF()u#LrQMFWhEA z;bI#qqEV+(YG_jr_7d4X1f`}cxT^Z z`&g(-O;NCYb+6>?n}ggYAjmQQ&jEef(Cap?^bs~6_A(Zz8#x>-L7#*0x50+kaM*d2 zi{mgQKy6b!Ku5xPoJY1JQD@;HT0wY@FUXUPd}JV{$#> zzJ3~^&ZdsN^Rc$Lxk=CTK?+k4v9l;WP)t`r3n&c?u{U$Z1-fAfMqRADp-VJS<%l=2w^LY z)pz+sI;&uL_(**&e_k(AL>$mIslEk2Fwz5qw%=Xmt*3Ex%P+#DqM;y}ZJX&z#i#L{Q~v*Tg`F%`qyQ)Wl;|P9PTDbCJmmGsDKE#lV@5?3}e z9khD-(>mvSVd98MP0Hb<6OV*_PRp!oz)(*=GMJ-=GI9MQZ2 z<72sC#HYr;qfVG$p`V=9lg%-8Cs**;yZO95>?(?3{cLQ;?@l{7pQT9+4RbB43R(MF zY+Y-wd-9jpk2yWk?vo!ZcR2$o%j$KaL8>RRF5el3Bb|T~gTYYazA}Udp?+06QRkWM zk=@URX-34w40O@X`Bb^a8#5o|;4KtnuJskjkpmpWpCpu1FtV|O%C z?(JX9d(vf%vfnVrHe1Z9OCtX%)w2gTt8k!CD9f+1`+@{8y!0Liw#K3tJ+w|shQ~mc z=tRUcWbz@Vhf@ss(5m7+20Mw|xKV zYu^+a4nI_sbz7+rIwysnd?OJXjYM<@s>Ix)=*v|O%>auq#vtdA zTo+2}zuN_wE^>g}3;K42Cs?cPWR(>c%Zsl%=Q%%Tn!sc9jCnnTh$M3w_m$X8Q1M$Ll_51gXC-$E;w^?0#lRp}mT(D-pXVL(8 z8cD`~GhPuwJ-6@izp0YFeL(`G8|vy^ds9C2j#cR#vVDJF_HfMJJfFm(->mCnY0`_= zctmR7h>BhFGqwtjWB|hj@G^#`hi+`5-sLFo2OE=h3On=H?Ix>gi=uhT;9+jIsEuC@ zr3RZz(VpYJWbP4xG;GwhO&_9z*bF+UcW$=?!=*3vq&5?Q0rq=6R@WX`JkrTV9D^VH zK)S3Unt6#-aoQ7t6F;=V&P0~!*V)y*I*2t{KQn+ZhNGu?IOA*#5)e;E05b$6N?7Tu zzz;J?NY*u^q!K}wQyc}qYP=ME1K%x_RX`g+(4a_`ve)cww|8fqkB=;;H)*c7D--se z_po0nOi1$ZOy~4MHFab|+pl)Tug!fU3r*tK;jENh-qJF4(y1K986p1>h%7hDRCC2z z>HKUmc}Kn$Oya5&*Rz+@@2CSaFv>E{gz`o9%xeryrmE{FEYQn zS|&z5V4kzRLh)Eh0Zu$z1r7)1qmTrbqWJ;Od3w&!7)5tFq~P-&vFRKnZyk<`sm&;6T)2+W0_fqKICxzw$_xB-s_R07elb z79JH86_N3p-W9r^P0;Q05venCA>g}i4H_02Gbg>|6 z1_v&G>c;wbqJn_uQA@>e2}9$XmVl7LwHboX$%^Z$6z|4|IOX7y;g8uPGB8ef#p4nZ z$`7cCM;ygDRM}Xdh=6KD2=!jD3O&LDHD_(85NjiK3gyIh$|7?B%XLbnFQ)%qCcBG7 z-!*V?9KZ0fM==~+?JunaBdW4LG6#z>o`05|qzFJbDUeE-7 z@Wv6_xnV1;c3=H8Kz*bTE@d1hfmhvx#=D^oB50@U(+MmyIRJO)`SgP=12c=S+jxqLTFfeWphVW1S@6&TrIaX;6zs)q5>M$C zj_etKY|xxw(QtRnlW%Kkj_Go=EUly!ZktI>M|DrBiwQv;b?XwVirSu4UQ^GqD0wfg z@=l#%ikd&xr@NK%pL!6r#}Md{P+U{Bhi> zZfLCG>~)32{Ca!*NV8C}0+~hJ09LxIAhcWij9uBXA(mOd7fY13x~#2I`M^OUmts1fObT&5S7jwt-NpK zM%httk9$+XaYZzRyw<4&GMPQ!Bew+?8>c3|$eBJfWw`QeYjmtjEjFJ{KSqe`2I{9^ 
zUyP__t?R8c@amZkQV}M})?-8BlXl=i*NkV$w0Le$@$bL0v4vx0Epre49qW6Wg;xp8 zrPC?}l}u3xUXbMwUJ5~z0My*A*eQY~s4m>^xr=^$k8wGwpEsgBII zA&PDcw#kc8+Vp(4PCGpXg+7iCSig3do?(QV<<|T=?t`$}3?P{pR7A|=r5{!v3S*N) zs;B}bnq*&wG&x?0oQ`6rTbX3~ng*B4C2pC*bnpz(wPp0K5pJ!2zpn;2%g|}53!->I*vw{E zv4-C*HG@0hMZ-39{LagKu%lggSPQ*#r@MYXf8H~F%q1r98mKU4ycad;fJ*9$OtQi4 zkn<%2a_Qqud`@zhRuh{)0_GT4^e{nt78-}_2cII4b<2r_e`v&znb@A zH82}1cUrQNHbK65EP-C?rf@}7UUG|*`uyF54N^O^J?cIyj zC~A2!kjk+CEPuGc3>60t(r3MvMag*sNYSxGG z80kt)1#-{;?_q$+3$)g2o#)Ke01V_N$fD50`3$gvVb;7nUTADfFE)# zbfzhbHfooSr=gD81d&jrAqNPl#Y=`JeW*0j1XXx-gqXMvuPRIV(Lx*!yd6y+HgYVC z_RigEj!|KVtDdL^v!)|J@AKE=;36n#sP`DB=%wmQclfqVB>~HsQR&bkyd84m0Bu~yP0qL zp5DJBHQPHp-TG=XRAyYJKm-Z?Rd155l3eMpdwzq)6>Ij$i?4L&qd)P^$p`4$l@OvI zR0R6@x+Kw$sv4OTQ7d6&um3WR+OGphg&I$9uH(ZF2jx8TOEj(+m-!*UG4wr3%4e_k z(Yrobc8c)@Qy5*lG*i%OTinv5Lnjc-a&sGche-qs^ovYGNF zmOgU7y+P2{>cjJKpvyLjrjcqGGuk9UJs^4x3x&()=-rX`iOQaH>Y83g6v5%7SMJgU z4t4Bjg9zkt-L1TJ@?GvG1%fEDqsQ z3l2*prrO&6WNncNlhLtTWFnYN-F_U&Hy$4#mAclDkgo0;&N?N8tz9gi;AvWYF;+<5jum-lTNmncOaFXlO_ec3){$}?td|u44hOSoUYsXs{{IY2rpZ`#%MXSM3}-NvV7R>raSfvqidB%B@?VOsD1PJ; zT);S065HaARnl`9zfHsvZF&^M)S*Kgp_X@*(=v(MT$t0p(C2`cr6#Vpe8tB>)bg8bsgwclP=2M@T8BDXwDu ztrWCB>2X*|%1t<2lTg$ubSN)12{Rr)!L7D#Wde2d;4q1ngK?{?k4I#!qi)nj;K=xN z0nTlZfkr-Kb*Y~@h7i}l_0$9Fd|wSPHQ>JCJ;s*PT+U$9hXVyT%H38PH7}bCaHg0N zn;MHphZ#8v74~IsAtg0#lgL`ZhuwdxBIeurlMy0hWc1y586LV-*;~231XLCy@_c+Lb3!`o~@gv9rETk|FWPL4O$eqR$6LO~m+ zFsiIR6NtlU#~y-5(jPOZRGBUFixc0<0S?J3N^Z2X#EwMrF33~$$3j;)a#X5Dj00(Z zL!9b3;h{h4Xcc0k78W!4dq+@-%OPhX`Od9k2g76HZT=!b;4(cRmMb-^^*fUmr*cX% zh0*t)qJ+C}?v_Ni}^2OrhrPu|5(&QQL#;!pn z{v@@k{}W#=(8778-W2=FLR9$y?u^F9X#K0?&U#cMPnFqH(1d##1M2PZGQK})TcWWW zE6$Nhykg53Bz9RLnfm=EPzq4R2t$jOGf^S z!d?Fxug_MCsx{sX!!~>QQv%4JC?_*LsEeo7f(+PFdSAghxJjIoqfUX>E48+F7@#e; zbPD|NnYj*3S&I6A8g)Yp{czIo;cqKiZ(Bz*S9h?5sdE?kc+X5x9iRVjJ}=#ON-+wb z23CdBLo4KsT}@u#>mQgeEz$)I+eR$dMX0Sz^>_%$-;N(|VZX25^5L`!iM!}06|K_G!#@5#-%nOJ5yOx8RWiAhJm0^Hk#TQR&^^x zq2o3ms2%5_f2WE4Hbz>m>Q?I(DRc?nvD#UBkaO^ZGftwauI;I+9C*NoC)CAJ*X%2v 
zV#OfU>I$|TL!x30^NsQA)QE$hTB@>$LT3HF-F+vF-IURk?;IW?8?|ka2ZUXrCmZgK z-8_3T#5s}cjs0)}B(PM0@bo{+k8Qqt%zS7J+YA|k33b-(Q!Z2muc(g!3c4_|y79km zElNfsz-3wI8nlIk01;tjvhbdxX7fEB45X=VAmtoX#EcortnxsSZTA=-Jbe z)@vqGswlV5$_ST>n>R|D$fY}e$x{(ZOs|CM)MB9S7>p?sTx^Bnq@;n+xp@VND3gm{ zGaaIR=PzSS25<5E&)I5^?;Fj$diF) zjL^SJ^+somx*!dl`&jW+n&2;s`_;dc67|vJ_Zuz)m5ys_=mmUj%zQw4c^R1j9k|dk zFhMJ{m`JtLgF?$vGj8TuTSTaumQBWdZ;bzZh+pM9-@i@d6Yb-?;ZLXr|2LXsszAkv zM(yn#pMJwqPC;gT+_4w137{!j%dc=ivwf%xiruB&8h;sxphc56ThN%NWvt6h$=_mF*=uR1qh& zjohl?78j`k)WDYr`+jkW?7p|DfHAb>M7>yAh*1bv%2eik=9AtzvbB=OmK+14&7ku< zU-I(*w+63JSPDBSM@(i@2&rggw4*tp`UU+sl*TNi+8D{{6~=f|b>{rT!i-08V4nIR z7eM^T{UQXY7>gT~j-Z$z>e(uo$umxvPt&>eU|2Z-nFqO4RM}1sSm8mRDYbGB6mB!+ zsY;W?n$2igwm{b!N1LA>{dGuK%FZleAAXsi^Pi#7$_*N@00fx-)Fyw0z0HQHKW8YK z@8>fT(#>Us>VcoWT<9JaJMkzvwxCk?BQ0C{)_@Dt8h&Hs{;4Fh#8jYMRy7Tqjs-dZ z8(0wYu>ze2({^?3k#G~|S89L`KS6pbR8KhmBq@GsQWPR1#lmQ!*ptNqM%RwJ{Kv%E z{cxwh8DyqDRk6cli&quPNfmaB?3}=LRjyJrk{)bHNL~ylw{wFMhrRoC;N}ONG&KFE zfPX>W$V^JQy4T!{<}#x$tbHjE`>9&eueIsYFYM9gx?U!J=43WRPUpCT=YZ0M#0Mwa z2)O=UUoVJ=PBaStv%GcYN@`a#F;PJ&CvIr_&SFRO7VP-Q(e-J$_ozzUd9Haz@U=L9 z(etyKZb@HsL^)f-eqf3rFu@CTcTXz5)`AI`k~Pb*$U5huaa>?|Xd51J>%6x5_?S!h zjep~D4G!_-qkqS*jA$|Bc}VzD<1z0z`N9)=_xFNPf%xIrJAHgWm-O+*$X%ay_tA1=$h;V3lIG{bfU_UEexB;3>&#!-eJ4MoKUaS&o!(tRZjl!xy z`OOlx*VPqF&)s9}UPTRg5n>P%+>%W!O=!z}dUQok;mD3*w_IUd&ErruJvTxUrQcQ@ z3bJ_i@$|i=W^2Ys)&ITcVz9h^(}ViM*iQqFS3d)>TB8A*d)szW@so%Jd4}f@3&r>) z--m*HElQ2^dCRpkuO&(Tmuqt$LnihKOh9Ael)7Wxv-nO(2xs>1)HW?T>863_Jb3%n zAXxm4`WLA#G(PvtLF$yHfL)Ot=>pJ8@9XKKNH?U~E=vLnFWtQkH&SNHpI`PQ9hkV|)1R zvKb!vj)kBUimBx`pbT2x>oqGqTkacp@tc@$>x&IRDZOX5Fxh61^I-;IKI#L#ga@wb zHh@l?6mbC)^b`#`P2`i?Q;%PO5S&!(u&E>hTN06ql5K31{iL&`g!o1XxIL->jt=dg-Y!R^=Ne>=4L> zhx0S?3v28@Dwv_N{!ghyPH#EK)F3bex6_8*uHD=Fw| zDC=`Q(L&czM3CPi^2i|R>e5vqorKVo?@yEMrv(r;%J@@BNFX!eHzH)!br*I8LYD+k zlI8jorE12Sr!~8?_4v)XTt13<8q~;k6JGnG{B^)Gs%CWK+F?~Qn?b$|;b2FqLpi=S z7mw01gtU7XTl%yLynYbEq7zU6|0>pZh4*Lfzh2(T<jeGi#M9-25?w*ogKPB|+f%%7 
zg;7QV6#I{NWm%8Rq;HJQ1Fk3uZQ`E}jkWT`>7+YU^@`A1xAPJ-|3Iqn>4i~rAQNt} z*(0?2{=q>U3Ge42iD>p7%I6nd-vkIqeC5LzUc!4cgmFu8CLpg9cJ1MayxgYi56$ zJ1#(!@8gP3)*hFYf1{G-&db^Ql`#%i#9agVdqxs@1kyx@Ev%rL4X(sNJr2h-*^8Xl zDSU~DPv=_&ffn^J=afk*fJNLR!{?@d1y11e*rzvp^s=#Gl>DU>;p-fXswnc3hxGR28EnfpAyo_o@=>00xy*6`#u%%> zZ|dUo6>!zx6`e%?kr-A;q+taTY;M&BzNQIk+V3@-dxueYH1_~tz-s4K&cZ7b1vq*m z+VcU=451zbVE|b!K9)5>sUni4n{=}8mN$_w`~)^ZYNZIkON+j%l4%sKjk!r`%F2r zBX_y6b$u0^7Ra}&1$oGkd&J*VhJ(E%jDrgT+8$`l&- z8UR3!0|Lc zV!|sOO(p_Tv-|vp;VWDJa*=g>(VL$nxaUcaQ-Nx ztuOG{=2SQ%L?%g=iu?Fdl{{g?xHZTEg)!gnMW#PI(B6}Fqy291LNE3g2n%V2F=z(q z;i~e%4^_TZM$1h}AGFmf$=fM`5Wvw9zk0HS zTae`p_6w6ozkagz`5J2wPmg(Q-okNX>kXG3&MqMIu`%&dZLcP!?L7gu)h{csHYs8Y zY-pMwZhL-ZTAjlvcy1(6RyF1!MTm=js-l0obQ_gNo_K4jEblzFs_1p>-a-r!ZF1Y21=CnsZN# zEn(X0U7Ow_#3p%alA`pX8&z8xuH!pHrlIoWPb9oMHmd=yYnYbFnnVNBbL`wL#Y1T} zEx+|kqaK5`Qk$y}TubVIvqm?PQgJCfnlL0{bYC}Lei|F!D-&0x3)dXi_ivZY#I!)9 zqG2}9f{MI?itB@bbw-39dnMh}e3{duzFe9IK0)3kaTv6CaXQ7C;) zbSXThrumkdKR|_T;Z|M~)s#_fNyPY)y=ii^tF46aMttO`rVr-sVkKK;9ja)?p70}o z{Pd-B_M;{(T{Ye)8MwMpe|?tZDtaq`Lj&5GHBqV|NBRDE2FFZVz@i;=};C740;_`c!msf zj7x(683;^xNAvF>9{beCSbqvKD=pOYORLSJ=j^sHsxsS~lmwf>jx{RRSWc~cXxKgb zK--GFYK1Xu#E56dLZ`Tx4r#YkwDhe{d2t5$N<^dwlk%*cy6KfJmo{BxdgQ9yJ&hmH zg@d*zvN?HywNgrzie~ZYFm>Z->8Ue}Ml_Bhmfe6Us2o=C&KuaP%%R(Ocf*s5T|;vw zSH;swPguVJI(KA`X|uz>dH4Cp>7OJ}UBV83Wz7-wJBx!sP{anjW=FQ>$}NZIr{+8W z6%mwI=FYSKidL8FQVF?sxZo5lxO_3)D>xQJ*c%jIM>(F&ibQj)y}R#Hd5y-24%Gt> zzSTBw^_%d@)nwO40BW1-*?$WJ->h@_cs9Q1Vu9zHe%?&F+~OeWr@CAP5?M#@#$OqlSWqkje-3b!j#fdi8PpH7Cpa zDf5awxwtXAeFpqZOp`6r##4kfT2S(*DM8!qxuxOX5h=-0n`QQK!-4Qx@Gnyuj%S3k zH5TC*ZroclQQQ?F(J&Xwq;JD=Hza>sPO_M6&Ok3{y94$iVuwzAtsdNR2F_uiqXuv8X|$gBG(A+H%}l~1T_%7fOZfr(she6>c#EjF%0V21Yz z7v_red&UE)ElRoPzEVgLA@J-~=(+k|{hX=X9C&GJVPj5PWZZG~1R4jdwj-NTh#u^X z@JSoNTvGtyYCkqqu1j9vMrxp+prjxTR?PTD0LJ`4Zsq|8lI5<&>y@btkR|XpnnN9~y{LRCMFlyzgbqwZcM|w<9mQb_Xj~>F& z33t*3@u^oXK~`Kb$~zm2mIZ-kuNZ3@)nbmbXq-xMmt}6}J}FETw*VTdM6zj?*AbB6 z0ysEB<5numQ@bBV8?XLU;l6&0%}V2Ytmo}T7YD^My9ih 
z`lTXFwz9MWW|d{0_D$Glm2PSU4LS zUmCz?=k;|ttHxg<{770s6!45r^UE_Woyu)k1#NW8BxZ7zoulAG%&Ds&yk2h9-4*9h zMNK(%3e?r2D&HtvB+a~UR?;#Z0UzPd3_uJFHqiB(zpbggwe0T#|_|@Lu7ORonu?m!aUSI1HwCyJ&nR2BS z{L+apdYfz~WjWoxS*hEexAppM?tZi<&t296vQXD8z>H`IRm|90G$!+~dhgnmcqu57*K`d3nQJAk5NBTEQ9%F;*G7gf!={w^ndd2d2E9D`^@O6SyZ zCpnxy&ovGgq%(rATdOtP3^@tr8tu#@6Vq0mJ^L&pJJ72hrN-}k}s|e%>`>$zdk-h;&(3#=z$p(%cp>mI?F)GE4&&9=ygnTY(cMCJGnw` zd2QG3RP*4b%R!R{c&zyFGIEXm6Vq^IH)(+5efkzb6A4Kx8|RaUq{tJQAYFG-_$F>( z0I!KrwgJL$O6Rw3gHQ(}doxzE;ehsBIY7zuEV*oyBH0PX3o%<`I4lOQ@K?d9RQ<== zL`o_se9mmoFwIGP(|~Z~c$5nxgeCg!&L`(bt6L3jObbatNH|MuXP-BSaI>lZzOOJy z&o}VBDUl*OwZ(}OaAA++!_{~OFWE(|66bR82}I8YS>NQQN|BaGhj{9-bfOq1+|f6i zSF<8{MG7ugB=W4c2Y}aM{@9k|&+WW=b#l5AUH!R;ro5^tE?-$ov|#Fiif!Y6x0T2r z%v%N%8-E!)-@*Px)|Oz)(BJZg&mI;u3|aT*y0>=wJtN-l&ul(bQ(S{k}RL)`+IHn->!*NedDr+Q*Kp}v`P@6 ziuy*6Y)3{{nR>@cl%1b4Iz#5Ta|4oUkOy3fK+$l^TZDkdUNr3BFoo#b#Vs97HnC_P zg5alwHbH!bMPSY;{R~^K!<-zMKmC2I=)FyzVx37-`FVW24{G76eCNul<}p9NU`8&U z$5~u(L+0q!jU1DyiHF?tX7EP}vYjg6smy{q_=CN??6!(mU*vLS+=r(4Iejg84d0y* zwIkZ<*jq%`HOi^-+!+j;uA~4-2~8JuR{(%mZnPlAkfPPdDs6)@P3M7Ea`?`_T6E&{ z^!Bm_HP)8*6F<3o|kyws#XS(t8m?!d62 zslI@DFutw_2t>AtLnys|*Epqm#t<@<(dQ2zo_|nW)G!uhK1^1&2FLouC%Z{4FF@Ty z;A!hYuU7=8HF22*q>7^v6_Qm70k2hD0ajkDbkL2o?v;T$ z-1y(Ge||eb*aIr!?=kU!R!gk47fz6uG3&sdsVfqA9>XPpcG9E7eu&muQ3c_w%d)HE zB8n9g=sI!jI?v}CW7n-VeaVYmRQmTv)4>!`TiQ5*Q{9L)KRZJ8jMyg+$4iS4oFNc( z!}7sxAQkWW$A`!?68bvKU%=$+YIQ9jc+!SyJD$1ub3JKWtTTC0V3d4S;*_8MflZTe z3lCa}>lWn^46Z7v6$N!vFigL?Q!7W?PizR-If*Et}>=oHAJFqS0mR z^RA|C#0t4q!3@l+`V=xkt6dhz{{jZE`&&HQA}-OC#D&2`oM`h0XeRxNlyp7;IaKrh zk-+-z>z6knZleiLeqaNpekrs~ka}53TvR<`8!^+(N!S2@^%!1qVxhyP+MHOtg1^&6D04EyUG6#E*tnRpA{^APkxu=;z%q#5fW)X~c);hlaYK zP9BLXFo2Z-=Q|azqp;KhjCNfqO#LYcKs$?HiTDVd_to2F7h3g!$PNj$9YJVI#lX#x zfc>5P2oA&-qDUKwxC{2U6#zPy*K3qvjjW~ z&KxzEK0V)roWtT?NP)3BPf{2=bk-U`VRwHo>m5h5*B}2s7Fum#>&(J!2-QG=*H}8l zA?vzex##c?_3I}rj(N{|S%tGA#=1pf@Ffk+z%!v54m$>PTjf^Z2{tPEk{~L@!o%Q- zz>n;fZ{4{8UdqvU)B_4Dj46@PEV**%6NC^dE!%HIloeJ-&f70h>yYSp7i`5bQee+7 
zPHm}2U#Ze4*I!IqDD_ANtphNcdh=#rvCR&4%#0rkc^xU#yY*0s zCKXk1a8oF{uK!QHuY@I(EE6sJ__MdmOBK@+3MHm@KggobF*_~>T(Z#b?0A2J63Zx8 zVPOZJ%Et{Po^{kJg;J^$1Y_GVRsiRU6SJ`m_K%gMwi69E=h-cioQj(Bup*{#XAE0- zO-*;3HovA{XwP zGbN7ku^v;ye)#-(K|*P9RU$FOp+g5qf2q!uKn|J-fB6HF95|`QTr-aQqU1fLtN}`XsPoCUeLK8M9ZN;Qz zGit@BwcKL&fs*jR7rp}l7%QL>fwHyhV*?%O8Uh%h>dF1x&;*~HC-ri5?|II1z4xo-T1-7|0mwbb3v>^@ z_iGwS;fy;2Fa6^Wk1tLUq;b0LQv`y|+f-E(0=Bj;%{E&MOLCh73ZrS<#?!CnomXPn zTj0^*Dmr`BC9B!n9agd}ZNiaa3op}z7ayZz=Uh|(H_k-Z_E*~fKPc~?Exmh4=E(;*Hw|$YV~X`> zk1|@P$=fA0L)XZ2gA$5Jf$!TmYlsU!*&)Gx^bkHfh1-J)s(ssy3cMGD4nG+*SR`S2 zsZsvw1Yw7IcK)EV(x}oQDtujg>J)H8I4C6}VYmQayRUdUcANWVv`d5K()-o^i}elf zco=9|Dr|FJ(T9 z^ca~rwSR0pmo#O(^`yDd=7-a;E5TmbQ5pL74u%AAB$T!l` z%6l3#gtrL?WG17`oaPHfi8;x%OAIoopjl!qNVD3R-}PQ4{V8mvZcyF9(W zX3R?)I&kCk_7nriXe6^;#Gz$f^*8sy%9He<#6ooB%3`dQSg{8tu z)zrbKBi?ghB9$P^8&gkB30cF$Eqyw1_1|Q*bf&hRsNyd$TrqBUJ7Fa%x!Bao1^p^@ z+nOJntf!PhotA5djoGniZoZhKNVSWE2A#a;p#zEOr;`>ny&2cr!+gsQky685Wyc<` z!fT3)`*?Bk7ot>B$|=+s`D4_eNT*2slW!PiaG>&JtU0F~iF}Kal70V9-Q5ag!(95J z@k4c*iX$abnw;HQ5*EGS-_=rSNvb#^NRw?k=NtH*6KNzfrMw!eA`fltl1(XyRyv*m za_$xpuhw#ltC$L_@f4jA!9Si{%qpOMJgfaqAHq?T7h0y#h#4~yP|L2AI~~&$X5p~y zpZ$9K@zMzu^K{Q2v4#uQEP==bOax&8$WEoiGOjQCzb&q${qT8V1VAPns<20ZR(@L# zUMNS(VSQjY;Xxja5{t-o`ozkGelkWnJ!3YsmLw+#Ux%jRjGIsHw!FLfK=ov25;}z7 z_R|+mEqvd^Bc#!<8HUKHLAV793|eOG{LG5!$2l8!PpdPimiN(aKo=r$vc)WGL_XGJ zBP2Ik7Epx!S_;_6VO2Mg$a1$a1giihjCfvhwq~U+c+zHrvUia8)ob@=w6f*hu1k~x zU_qbOrsaGm>+QGSe-hUhDoB>QK?>65a&)ac$m`JYeu%MU)DIaDNdWPiWE(Pw$L!Kq zU`@V0T0qb6JoJTv6>&~#e#B{xgS3w*cy5SzQsGxCHDw6W5wM|}M z>%DH?Lw*)L&0nmOl2;WWEP_-o3}{G5m6$(qXZqFQ*|_YTfQH1kh8no81uj6?S~`M( z&j|BWMD0W}jkG3@jx4LSD>;htlx${)YZSuq84DVEUcQY(F`e*{+GYpcHG~LSx1VC?BDj^48u%_4C; zyD!NEW3i4sSlOnctag_aEG0^@%~+>$cy|QkaObS0-U(lF9Tb$Fvj7cAZhE-a1>Gf? 
zRstAs)`?Fl^je-5oz|^M3VjcFhYgEp438=Ub>YU&gAu>d7zxcQ_SVeE2wo14(l5N4 z^s}J^FjuJ&INEknp0CllfL$*u{5-?RoObv+Rfpx~c8Mv8%2&u*y+$_LU}?~s#Ywb05zLHVNHo-e*W~|^W(WnqF#?2 zg8&uZ8eUWxR3mP&Dmiprvcbz-XrBwU4#7`~N{t8E-cr8XboY)&m?@!`=L`oskw1VfmRwLDw)1m&XTbK_u#H-BYmAYBf$OchZwfHSo;u2b3lX zMi9y9SjVH^(|xi{>Hqw9`p3!l-b=_HjCs$5!|({*iiAibuCXzF9j}{Y)&>ayTT)SB zaIcnlVC5zLao=dhrUcS@wkro`)kcnb4#Cm>#mXgeE3qn>RYE*l!tdjCicqD2$(J_o zJuofit9sVXr7y{xh&OdWx?ip)wl^uLyt;XS1+Ffr!+~CW_kP6yevZ2dr-OAjORpRhgKc zbqEH_NA0NA4`5n&y7=<}20Pp;_x+0Da{F9~?t{df{DT=0haR<>D4Ctn034)avUbl` zCA1+EYl4ibRr{`-Qe4_qkkFPfxK<3-cCu@C!AUNCjj6d^WX5+Ga6!^n#Ea0ods%ba z#e?LP(?bemkDd)SyEDlghC2psY-d>N>%XQPMn%m8j(e_}OycGMX9G4YEq_vo zuY*1{4bKMK<0GfbVj|rtkl(BopJ?q)f3u>}s0}-WSiQd8eI%qP&xw~u7FX6fGR7G~ z);KE-)GQ86U>W}RindMRu|rzbi=+NuBO1B}RVT5PW9XP?NF@W_2;R0#$;#W7yehUP zhh|P)y#-W)Pz1tBc?sqqF{wq?Wd3INR$nfzP<`RkBY(7Me$y*n_Uan;Ftrjx_&S|; zRkpa^6H0ueLN4|%^tRJ?x{)_5tu&r8dF}SFN;6_4afE<`@~#g74G1Ol7MZB;2Wk4$Rti5apWDpLL4Gy&zKvV!HdrhX)3E&fiI zjUlt(vMqE$2IY-W#VF${(R2oo@%1P#(YyWotj4Q`pXBxccNO_|Ny$sQo|8F`{Y2^{ zo%HQ;ls$-mZw|q{XIsCIJL=Yl$0;nNco2g0E{eE_pT8aAPvXp1Vpjj zaNzciT)`1wVk#X_oRE8FkEa;TwGJ^evgIspD3nPXBZR0>`wS~p0GvBQ>ih}O6O|>S zmg&TiTjImKxGxv+P=t5+64CLv#T_$d3hyH{S(k*zKQOMaZ%g*gB^JKt0cu+v;A8G7 zBU2#$m#X6ty15k{{!nE{vkEZdEpqC}4B)`AE)H|x!e%h6>zv#uT1xdiEs29#vcC^A z<~J^6k^X)M7sqWh!>g6dXlFKDMMACXbZ?flm>fI76_)&92u0pd28exYz@tsd8Oqdt zoh7LP$4YJR<~W{SkS-r@DVf5n9lP+c#--gtE?tHoFlT~BVrQy>rIFgCzIzzmg~fe+ zxAQM)QA0hyhY(uNZ$@}=^NU$*Fb7Zcfk65rVNq|2=>!F&Qwu+*d016t>Y>8d^qD1~ zu`dGGbclKG|QmfuT?+xUMMi;;cvZh-_`5JZT>tX*lh4K`?j^qb|MuU@m zpz9fpN*==~cImQeuYAN+vUuGA6+WFL#pQvUt?46udiIQ=zKPb%G#dxfB^gJqlJjp5 zYg};(hg^YYL}$ZTKLb#v{Ypik$X=VG7ax{JpE#=e{MIiw59Ud2)u;bP>YxoD?I8%o z_qkOQBZH7h1V?V!NT~aH+6Jsb&;P#veZ2U&3>_LrbS=$Wi}Yyg{}j#tv=Fj1Gj0-1 ze+rY7s=>-e-_?2r1BRu)eZ9BLd1iv~xKjdykCFI(B=z`}eM~u@%rycY`iKBY2X*Rx`m1ZleMvi@L~ENyNbo`^R`zh@`!fwHq`suEB7@7x@OLT%BjKDokmyN-Wu@0OLbQXni$N+*J~S$Gjz~*Zf!pFGj$6lx0gj{o zmkT0Sr#2r4=TltEkjz)BuMjkLDxFP3Jc7ge{xh6l+RY|PqwQG)31F$b(0x>gDGEXy 
zhJ-QRhlqF>GMH8G(pQd{PO!N)aNN#jKwJp`!2tb{!<3Bo?OF$p*9I~z+z1Gw9M8T9 zfU-@z-0N;Vr+Cb0X0_Rz?FEHhE z_-Xp!tS#b84ljs+!U+7AT6Ejb1zP7Tj%o(P!=ytd>`17*Jao3!-rgKCEq*ex?A7_l zUz&UxDUVe<0Y&S|mIdbf8L{wO(Ho>FM-Eq~)mRhwm}A6jS~kt?Vb`8UWPK^l3RWwM z7q@Xw(qcb?cnWc(=bOgepN3-mL@Qd^8h^f$4Zo`w;!;IbQZR$rrhuEdBJGljoVaq? zDM4;gfL*`D-pm+OFYDh#KXRTO5Ud`M)XK-DePbjoRUHj;}jsz^;UpHyG&F1;Oxc5yh>-$2dz-Eptt( z5tOC1#W=dL71t>vVPg_0H(lv$QfAayj#sCo8(XT+KE#XbM1eF)od~b{@oCRfiL+dI z{Lm2L-c+rb`5*zQHt+lX=6NL(bb_hAXMiFc{qu$>I@lLBGLtK+^B}L8wWHdOZyZ*^ zB7kHQpC|3tqO(}zJ1tu$vF_B0b(8y2yNHY4RjsMqZdmxgmFKkP*$F+Pta}UvKHo6h z#me@VW6KNdogwg~{A#pyC-!VACW39S2xtG(Rk#Oo(d#U_C2PVd*KQnrmZFD_bb7#k zy?aO3-}tx_RjDYMyC{bKYV=y3Li87(v>6|?)yHAxJfq-jjCa3p#n|hm&uu}y#$MJ% zGGdWLhNxD5X?=#8DW+b`&$_@+CoP~r`A_6VN4!Spo= zJ-jW?4>dE$bK@$J0c|@AAX7EvhYNbEfv*693H@8xl&KVh zc;0r8cgPm5P*$dOLVEeOVo&-wREao1GUI3szevd5;>44J2`KgU-^`_}#MK4L&jaU7 zIld&9sP{cwm=?a>b%$dKq!}i?C4H$Wu53ybXj6zw?I3UM53#~Q(%2(h`e`p?= zqVXG{@s`CXzk}t+{I&Kkp^a_uNYJ+k@ONZlem&}?D7;JF#{|E}_4`B+I}DN;;@rmf(KZtS?Q3>3n^sweWIyd0JK8>XYl_y4m1p_W`tv!}cw(L` zbjNSEhYEGs)NM3vV#8|Z1%ExVTJO(Lu;2BoAVv~2lFR4nKgq^U&b*lMVq zx1MxTn`bh?5pCPNM=qUfw@-ePUeHpKjXAR>w${v!3qh-jh{KQ#BSEGs@D?8HRWnlH zbP@`42g|*a5m{>ceQZMLj5Q}rr_z+^JP`9kyye>|tnEn9_^ws)V2Ri02vb!%_o+;H z;m}G@>GlI%@5R+fxvd#Kcydd0)9G^t8IkGx*vLaT3jGC3s;u)PlkVNlLFFTWL&}`J zGGjVgyZzlQkEuF$dz8M11z(`zM%W>A*UuPP}GQ9=A`o<6vYVHug3QH(~ z-eC@p^n`1VICT~aU1Za0mvZrso5$CExx8-9#juyZ%TC@oDSfuFN9FSV@zA&z z2*@$8*uxuhY8R*DjM-xalJrsJOPOm@Yh?URzKowlhI9TcjUEg5hn}XPJV0kEAv~_| z$HQvXU#Ib92Q8K4QgH~UD3%5%#@USvA3{71s==W9OI}@J`Vj(Fg)Y3^++!6$``7T& zS1K@AW@&d%US#~_dE=)Q&GUcCy&sO~Ij4^y%r>C9WZ*OvDuC$P4+Z=h{wh}E2WRx^ z2g_4twK{-JhLti>I;oZ_&M;mTrdEo?)#$sj=07Q9=1hym289TU? 
zTrL8RMs&_Y|GQTj#;763X|zO2li|nC3s%72^#4I&iQx2UR*?NPaG6U>u*(+?cdowZ z{p>aHvgy)(9D93;DH6Oq?3VfjbK%6yYl9m4S~63e;msssg;cZuu+a3xiT4t7B>)sA zj0?;<0`G-AEnTvJxH&`M{NcfUB?TS@;5IR}a_%dN&-F!AUrz6=I>rItYGV1*=kQs%RFqC`)ziHT3T_l*{h;8W7w~<(mx0uXHo_(E*#u%@uhy!KlTm#-jA@Gof+ix z=)Z-VFQhM&^0bQ=#-0i(f~{8b?2z3o?;@@|VSx;7U-8=VS+A75I;17v;!$^D6Y)tQ z$*U>erIq(5J!<)Q_YJf4QlOaRkO1>AwTY_{{S*jclFG#9&jj0_vaKsg^&+ajUw@>k zrU+uyp)rE%LN3dRnkz6plktUWtCd`r^gFoE?iOX#>|vxl4z zaw)#|$08P(*s^?klT6=%3sBTQ7~^pS^okhxT<{laMQVXPs(1rw;-Xz#E|?M;!KH(=ue+$M|| zL0IiB)&%-dc{3a>v@Z?4MX9s7ArPLg@ONN)excb00&Eywn%;s6_TOE=%w1K1)!Oc4 zrS9e`9e4E2KS}jyfgD!%;AdRL-+xZ^-s4l%P&#>6Ncx~{=Qg^`4=Q{DufJvmT9H0lX{K} zcbb9Q!F-l!!VczC1T#)m#z76b=l}j&h86^bg1-mlF-q^1;8f!K5JgxpeY}m5#3Kl& zSfiHaMngn|{kvl?BX4ykeodquyA{_R=2YBO#7l^=zntYOBRjNnk8M`PM$C+f2E&+g z`xNhOJ0wA6UJh!h8%ER4@{UPO%$F(wt@ZN$9h4aTLpZcMk9MT=6^RU-()r1=3%M|zXMQ=iY<6=n*MOXx#7+=1geBe$II*Ox8Hbb zNDAsrc1-O9hRYPTh*jB&i3LVBU3V<7c!5c#U1}pvSz2E^`9 z-&if%COI`KQQRcc2%N-;(6i}VbdM;8w2z@M43q$d3|G273lkXi1}k&!pD_e+&5?iIgeC4q<+HHo!(7jVji}CJ zD|nJt6u--_&OI};zbuZojBUe(<1%!WU7jD6~jq`7W0NgY>_Iu+1T|30?53~Qpl_Jsk|T<#qQHePA0b}z~#O2(kaK0<=_nM{%giuA~(TpZ0>_$L00C%RivGK|vO#RrMOx=Uqj=`HIOR1YEJ zBM5U_K2Z{xg2KAwD!%!jLh<$PQE)^}@3kN$Tk~AW1Cwd{0j1kXF6eG$oAm{8F{;cS zozN4yV<75%Hi<_)z$hn96jyJwI6t0aRn1d#P3k6Zn%W!}T^?Ga^O$Az@ZMq zCqd7J4+Wv*F=#&Gh*14g3hNLd+rywS8q1Z`q1O;nT_|V{DfmHiR7lM zt`K_b7ds>d<&?iaC}KaB-o1hpN4V{{9vx%Nz*CIz8w+VC5%xVNzxz*=|(4Pug@g-8zUcKT=icBeq-2f_kB0=%z@SwZyCCMalwRU@lI5w~B{q9JeyV z!Ox$Q?QPyhZEr_m(C0y+zVQnAJ}4yG*Zf=kjZ~RO*y?B`JrW&kJF*KKcDb^jhhZ&2 zl&ql2|7iN(X~??^y?IB8*$;^_AFPqP!n+kkk?Pfl;39F^^LH9)e7cEx7n6#-{5tzi z@X@@@@(C_I@EMx@eakAND7A+)9p=^sV@$H=-z{(64bu$Jl(?SbNtfoOJoHml-8cc} z27#v_NEQ3plUyj3*^43#0k=Zt`}QhpjeTmh^5L0Ho0W*A-*cv_w|49n7mexVz|^Oy zolD~xE9PlJ1rFK`pQ=(+5WV}D3NI-dfF9N6MTah z#LZ?@3eu0-%mgQeg?!FO{|oURM@@PT)?}`Lq}&U#gz1@mm|f&PVEERhW;WKe(bk+O z0e~?PzN=pUFzuedq(RvIy-WPDDNYPu68Gg<9&5wigOsPn()4=umn_{CvGGt0fjn7> 
z3%{S1(e&m6Bh{EFI}uf&bqlz1VUkEMsLZQs%l6fiKB7jZCS_`S2>V!te%=_j3r`^6=R(5NfNKdTb@bkdjwGqlTF3S- zUr0<51Xy1-MyqqBM?+|O`zo`d2v4>^**JFDgAyA%QhN3G5V>H?lu6?Ylz_D{m8BOS zV{Rvf-BEY+V?M)Bw502l!Zau$NhmK5iK2&kX5w0RREM=4XlTK@G~%_WEL6z2NAl4|5+Ir mmOf-)@JP+c$yW%^FUm<#0n4z3+VI zdo^CX%p`^bK>&yJ06{<~rv}3ThMQam-+Q%&vIFr?0mz`t8d; zx^0&(e!aWt&uuo}+m?Mb`s?4Gy}o?3^w)30m;c-A-}&#SE}H!NYu~=Td3}9*@29q$ zxAfcFUV3-@Ie(7*`RU6&xpdF#ugiY?y)^awIet6(dS};`d-L1Z+b&+3{P*&vcv`{&E@&z`pYHuznm@8!$4uWnyorheLO?VY@5&fG7( zy*8b>X}>MB^z`1`2lwZ{wZ4=0`}=m!mbUu)`&;_`v-Z~C``hX3+njvQ`faVazvXT) zK4zaiJMEpn|Fyafb=p4;U$@#nHqV`Wy)*68TSE`a+R%Snao$$>I@^ME`tNRspQgXh zdwHIg^WTRrhiKpF)XMLl&t0~UuwrhVIG`_|wtsDUXZUjX?H%yZZhqYN z{6A;x{hyBgxww;VzqX#;Ey~xaiRW+K_SbCm<>~A4_4bcSJOk@-{+TO(WtSmET>X8% zx_d<)9|e`ZPp8{n+52*}AnhI!+)Me}qVo0WudglkzT9en)TDR)`1?G z#22(t%HK2Xr?slfuD0^Fp;btO+Fol~i1q7k?bBTkTgtjL%$}mRqk*X4KOd%@_s^tR zmt7;ia?anmQSaB=Ub>Gu{ZHY`_^KlLbI!n3T4R16?h#t z)X2+tLnAFZWB0`>{WR^<+fQqBSg;DR2P>kVqLmfi+C=@d`fJW1F~jHgub@7~>qgU8 zxxmGyhT4i666daBwiXC2#N&^zZD}Q^rB#`wDUFkaIP%6fj)rE9zwW)Y&x)tD+*|w+ zy17kUtzTEKw=o$VyS$hPj`%flKH7H9Iw8j0y&O{)UcS9OM~R%Q8jTYMBnhTfG9n); zamp(?VLiH^TEa!3g{WHIp4EGxz}TcEIs4m^+JpAuz3m6B$v5=+@3waNQ&{JLRWU?s zi^+cjDYF}XZhJ*GWqQMjqxf?ETK%)@IkCu5wOB8IUXkN19_9j z#z@WvD=C8XTyeiUYS6bA*{4H^iSd2|B&V>?-xZ*BA>-EMKy97EuRue{<#HnJTz$W&lM}zw zF9txCpDjn;0feR{uy*y$5#F|XHUs%-zt3B6jW<9ak|Z2!MGt+4$*YgPE?$Xa1JQDp z|Kgo%LThcuk=j-P{aU;xd}$)1w`9F+vN_Gm44`BP|IcI$RuY!FG8-H4wrbLu>$Sk)h_Yo>jW0<5r7!Xa*Fr|WviOiOKGDNs--dsDAy&T zD8mmUo(o|I0ib~H?(30D{Fl&S)=(7huYIU{3*cWtR*c?vEA-FhX?w1 zc?84|_eK0aL9(fyrWAHS?Z4 zuNcq-L3CZ%H@SrfT3IolZxMIrBOqX_TaRP7Ml!*X0p`VZ$CgMd&l0g%Oh1p{GjCl< zQ*kbZqikSg2_FOYgM*Af4~&wop9I94YdN)VN5{%w6o5u_zX7D7Geq}R@mj&G2*!HI z)iR?`oD*llP0vf;nVWG{@RGJs(`Nk|AqGc)VhyNeJ0Ui zHDcEBHyqUaW?}KkFg|0MNZ}6$6lyC6goKu2^043g4bD>aIHP#7%kKab_E9+xhxW#n zeR=BwJI`-ekDpK(|C7UbThWT;ivx3vb?yrU1V< z)Ks}*Dpm}5rf8z=_M6A!C0qldftYP3zr78pdfVO*qZ+-k525^qo4}sld#-`nDUwl( z_V4W(vX0`bqGo(xh{%eUj6%KxgSYnia7JDBRP>fyp+7zDD2sc5o&<3rg*#G!Yila8 
z_a*{ePBB<_;)Sm^lruzdp`?h}2%qtkw}}#kj!^d48MT76Bg9UDmx&X)2-Y$a#3D77 zOaJdG9z6A))Niu^qJ#$P;(&Y)vf=0pn6?=}n*QRglDrx~MtQV>AI(q-C-GAe7U;Mg zeyF(c7q?6jzE|U%%4@IcQy?MkJGG+ajEF3|E$G^-m&|zDo9|d7jSz|idR7L+NDCDG z4r=8p2{aJWa<>L>%T4oa^0j)a*Vo$u!a@-gM>dcS)Q6+Cn=^|eLfotuX4K<`SNNkR zKmdu!nqHEuu+6l7I>c*gW8w;&`hgR@?QvM70Ycl9$?h6+-eq0(#wpEfZ=Khgih0T{ zB9XEn;A0`rjaj5x5Chr_gG4B_vx?Rz)>qEg;lo4!OK^fFs0t;%zS=Wl#4UUQQB7a` zYBU9<7WOMWb)BUY!WMb@kA?%L9h-5v_@CI#gx@E=315hjwaAsH49{oMR4_h3L><6KK%>9%8&%L>ZH=U$RaTQDXElmK zkDS!J_DHx>QW;!b@q=SG!j!d`oh9%L8vR^wyGW-4Zwen}drcKrBPE}|;fw_A@=&!? zGBtq94+{0afFwj%Gs{$`E`zTBI!pFG(ldgz$BH6ElhlAN-jZL>76IVhw_K^%VNMY) zLvBL7z0!cbM=|2}*oD1PuLc!3@lWVbN+V$y@I(xG4TXI8V!~!tOLNHcOq$V@GVa|Y z2>?fJE4*GrdUie}&8NBl@6{x_9QqW}aDwUZFrK2BLW0u$Vck19oVPPb*u^3m-ce)= zJf-%al(s6F3t^{vpt&If*r;_jtuabf8_qiL^4j)yLj(NxWhz=oPo}z$6drb4qVQCf zms?gZoZ+nOz15tQUhPOtM4VQWDR2WD@xT~GGsRSy#IGnYr$HYf8AYt8kS8>8kB1WC zdlj$1)gPmgpYzIyAodcE9WxGX<6rhsoWhH%oQED$L7%QlM>*<4?HR` zqe0EQbN~Gg;_X34Ctm2fKr-l=>&>DE$Pi;5t9zhoS0w&Q6vYp|Y^)O_n)C-+FnH~B z7NuS4fUZN8)DL4`ZGuBC~n^B4ITOF4kmC+xXc z_4T^@p>DgKufgQ*a1)a%pvP>o2-^=j0jbPrMCAz)pM7WTX@8x#;9yd>xQmf)V)@GkOkl;kR^e2>P<@1Mx{3yc-MsB=Xi#pMbxJZ6 zeohPNFh;6F-m&2v80ZxLeam+kM}<<}gOV#|^>ytzo~fTYFhq_3$(;N_*R3mwJ(KMR z4B0zpF5(@vs+EjD;_RbOLxknV_HcSd3g&!PQJ&Jr19=ckUnfTr=#BE+^@Fu+l2 z){pUFnOoh=WjG<;-307_ry{$)h{$4$ImYT7;4HxU_d3D~xvx>c+q@$7#K<1Hj%IK@ zGpcKDNk?$5MIF10P)TBDc>k|#VpP*iuD(72+%^2Sl`#ud<;bufX7eH&AejF`q{L%5>j~p=` zT0A={47_Tap&#~;;kNcsPp=FHBo2ypWt<+J(<2ZFAz*@sB=`wy#F(Z7`<(frN1$yw z-b|@h{*}3$0MQCbo87H-!o0j3{S+%4o$MgiBy#=zE ze}|sejJqq2qMQsMWo(9sZa4Ge(;@E2`^pUyUv&QU^`K zB{`et!!K)Oz^JfLf}f@`)%Eq--zrmo9Hy>o4j=V(T%wg;b-TkS^DXFIuIY8(KqjNF zdwyq>@{8MIzN#$Z;Ynb47v?ZH3Jj3qNTaw%&hEKT?nyOq1@om0UTEVD7pMNE&v_FR zz#e4Q*GD__jrN_VM@%pVRH(Ma4=47%J`)$qLw5$MMGsF*ywDiI-})BGJWCErPE*qp zrpT5pSSQWD;~fY(VJI>OBU(B0zK4i+aRlBX8eP<{Ek^YToQq91E!V)WWsEA>BU$2f zRmNhVJd7TomUHVF=uRy@#4MLy(YkD>&Uao7puLbDF0Iz> zc7uL)jkfqv;B5{!`D+Om_W{oWGVRY#uGY72q(v&ANfc>Hq;AF`Ctq3gDKzSP%Q(muL5hp%j9@#&S$^MK!c?Jyj4 
zWAeK|sRinvGf`lZ5sVYB_i7#-fo`QScg86qY{+A!Jd!!M?ryeJR< zcNk4Lqf%*f*B(?k+KqiJM)AgU{_pu3yLS4r0yyxpVsnXS+F*q96FKNxRR|DZXOAZ7 zFE2xbg**%iloy!p0clc9IzrMyEyTFPWD7uToe7dl6~gp`jSD+Ssnc5HC(~InHRjzl zH01Dpmc4p*94_lEYmu;Y2R8x2I$Q00qOE>Qg32L{zee)<>Z}9fT2GFe_-*1D<1~3! zx`{?#UeoZ{5hpb$eAAbjy!?P=0DTIv1Eer5}UUDa3e5EbL1 zsxh)utL@sNlt{V~=U@)G1Ugm+2PM4}QC6h%z4UI{y*Dn;IB0gSQ*v`!MQkrVIjmv6 zh4j39rjA04k;#NrBJCZ(;GS<0r)JG@>R^T};&JL|s^>6@l0n!%t&R6;*D>977I>_I z3JiHe;9JBpS_N1;gM`h$NZ7pE3lNLqnmYuOy1N_eaMubgm`V(1rUU+N1$7ms2-_A@L~b3u=(<#Rf0bmf zKV6d5gJ@MR_3e(`?cs$dWKv9V>BMrXD9?&d1i)d-nbvWxK#Kol)j+8@C6&zwt}owk zyV~GY@n;ctUxaI#XgJ>d0UlMTGmd*Ou+15#TTJQ+Z!D%d^!hnkuUda`inbG9dnA)}eo$ z5q-k`;?-Vl${P}>)ZD~c-K*a(bcqIoC!~Y@4?Q@6S!#r`m?byc!_WD0XzL}6r80H- za1!XEL2>n}$Ox?=%dN7#`-YuK7*31p4S4aGoW7JQO0`rTNu%l06RH_{Pd9j$#u-75 zyd`Ua^@^mmo}!WTSLX}V9v;LqOVjhDCdjICPnb|amBif(f%DuDdG*bAdRbwq9} zC7KH)=eq~bjvhwciRO}(x-+}$fh+j3Rq1jiIYQ9Yi`*=mLd z<`Ciw)zQn5L|9X6)n;ih$pOZ63tYwc@tP>=E@71JN(a#dBI6SpPkHAVFv-)!!cJuj z+VgzmbymBp%Tz68;D>eNygaBmL(qi~v4@JG-G38*_}jp%d{ku2Kp|*f_LKxUw6B(} z4N~H?GH~UAu?e1)V+@Z_u|>vdJEhKV(Y%+D#}b}Z*kl^_+!52soImZ-wP1MVpqI}ZP~qed2qwlN?jum$}&+g7XRoMjRSd+XXe zbTZ-+XIQ=n2k>pZDEDm*i2%4pEaCGq>Ql>=$fN>SwiwASSbRHu*5H%dC#U=Zn;8qd zprjkV9j1YjNbFUSvk8@FUA_UiE09LXYRV>3c|i>2hAncj+;Ie#$93|81a^5q89LAL zIP^C{jUERhT9T29*p>B+CFe=%`cS>RB}w?s&=QSDWtqOmo;3`zc4C831JhQ^+rnEE z92xF$WmOu+cZNY#;HlI=;ou|a(&X3`iTR0@unb3dk#SIx>KvL|YJKa-bPB`{He*-Q z?PAAp5m!}tH3tR}C%B@_(}*AgvX@k;omMxlHYxc)h=EdPU5}lj4Nrgw7(A-|1>CJK!`bFsCBQEeZ6cfw?&G(al>U zeWB#qPjuKP|LYL6pUYjH2)-#MsF(c1Zx(|$Awc3ju8?(;WFF;Ko1lD?9X3sYpv7Vj z)4_QEhYAM~wJav>$oVjV&A`WC&`#5a8ZWTv-He7KB~=^n6jjr5MkI_@uD*7E62?GD>M zq2B?Ko2p!$E~+Z8pL6^=RGrwKn>8SiO_c7fYRBN+)o_7eGc2n*>C3`>7;fz(#v40% zAPwL6c(lTh44YL(VgfGRDD+f<2^d^5ZrfNA04{ojWJ!E$*4G8srcPEoStBBp-z{aR zdrxxdC`w2?mgr~uJ{g!mjdVwBy(H6FY|OdgK#Ej3a0s??yfhl_ua%=f?rtoQ6|*VNP47&zF{#Dl!ACYuX;=6XLD)$agvK*j^SW(Zfwe$6=@VMK)s4k z*5qbx6Ah$P;+3m-u;rxLW0w)J9d>jk8PJnuDUk9^M7M4fSN-sdxr;9{I8@&Ws>S=7 
z(zc;EN|_6zrnz7k=)M*nfV0ZVWFTHFfCOc&)2#he|19roUNzJp55V z?j!2I%&39`Mcc!t1^=;Fl2ba@xt#CWIn`L1LO)u`Y%r4X)_gVXI6RA}bc$b;9KOf_ znt{srtG2bp`hsfQ4T}}1aNch6EI`Y(BMl1GhTkdmC5v{Y5hptQ>0B?++UDqmc@>i4 zq;44jnS)R}Y5zAiYqFr4V!{^juxhj(|>Hn8sqCAE@#P~=SElb;r&?p4?I5sk* zYg@>v2qGsr$zZ3oXQ%m_(#u99JBO{Z>7lvK`32;6oTM*0zKydpf+bk_lT--J=4}l9 z`eQRKRO)rk6B6K%SjEc9mPg%8Z+U5AXvyNGEHOCNA}&-kz&Z%j(p9u=g_rm?)NOq% z2(?kdmgf{{oVu|zfqo@vPVJiU1a1niN-|b1sID z1__hAvR9@RSDl#Hw15RtqGJ~WmJZe07@?t{3k6oE69oyHG=Q#(Mc5z}QzBABVc*Iy z;7JI@+};yeOLfRv#L;e|4u>UiSYJA#OfStEG`{5JF~}3CwYX@y!>Y@5tT>EW+A1B+ z>6+gG)J!O41lVfc2Pj*_t^k*_xP+H7-Odj#l>~KAZ|Z3X2Qr(Ho048l9hDNIX$|;#V@g3g6Y>)iCuPW%4ch1DGBwY^5ORdK3)t*+Zgab_7K2$QmR`c{@#=4CO?H zBY6C7_0$2?WG*&qhFC>0N^+edH#-}wm8A6fvQrEkwQYYB8;cd`YAOUPnWQ(}8}ok{ zVlD|^y%iO@3pmBN0wtNQC#JY%;D}cjE;Y{;{3VKwj5!JiO^~BNXin-@-nfqyWE^4ml$t{K2$WyWhan7Ub zQA$1;=yi~hyk9Skgo}uHWSkZO!f;^y&nrF=UZULuXR=D`mFP8*)Kfc&%kpI` z`lB;p6P#d^B-wNXbMvF9%6zN_cpw;#047pZ^My&7T?|T&+%R&lCE=wr?7%rbvX!H4 z4}4QLz8IDQPiJ8>=kXZILqLOb4ME+0lb#L3nane{z!K4-m&mgs_LHTuK6bqy8FK~I zj;QIOt5}eb=~PGkm&Q2$j`vw+Gmh0a##bG=#g}KyhJCIbO-6|C#ylBcIyOd@w?K7z-*~hzW&HXXA@{ca2PO1ce>==2ir?T9t^|p+Z3%H; zHl;I8m-TW0=X=qkYnLX%;AlxsXh1GNEsFfj#do~=RseBZQMZTD>(6Gh0<>bQvj~() zILSAs54mwNA~Z%aYMV{g2XZQFx<8}`lAEYsNdeiV8DumeQF=yeixEZhxRTG1@o>(? 
zE&Gd%`%@BA*Xx}Rh5+I;pFyycQ@?(CbFl^c+)f~oDswBt9v)*}8OleGu;PA}lZD5T zg8xfyCp7`P-qxqD9lSzBV;War(dPPa74F+0$9va3^mmgMd8hi!Y1~XeiC36}hH$v8 zxB=NXkhk<9(XYGKW``OP&+A2|-)jew_5M|Nl5w-Av>#uZTkr@fEZy!{4IO|D?bE{a zvDYM4=X7ahCxRfmjkmj#&vyki2UEUEcn4U>vkMg=`9e3nHd7Xa$*W401I2O_Xx?4h z%|T0DuB&;#J@I}{oiQ9><(GzAQ&>J)`-qVf(9;TvS43`ZP|!6rJ8*fgLcECX;5uyK z?)|9_o%~&q+_^&`EvcV2TPE{b1I1vMKt{juw<*!3#YF!zc5$@$9MaQox-TKTTRj3h zNkxq!ME9pYeIX(~CktuIky-c*bb~2GKG_*B&DuAx6Rv4WSR1$iAE`37Qi*66tku3rZv3tBtVP9n2 z^4#0Hy)dt(Gr*`f`CpM-t?6$<|F3>!tmGE3fz|ZnvEpwnclD@0g=$NVQ{ROAE#AZ zoKBZ}da~+GFYmR;#1@xuB6s%AxBA)KaB;|pI4)K9H)cO%)Ap5}9<3VRgb#_8dE%)7 z{d!AS8hV>Jio_{F3ufuXpphm<{z}k)YCef=AP4xv1cK4K$;zzqfGi>}QU+{#apMVR zE%B%$1O*}KsL+ynHj-Ckl+Yo51ha6Zw4{6B(+ef&De}Q&SE}2AWYgU@6OdE$bCj!H zTOpf>$g&j6SaVH5arBoUA-7_>ir9ZfUHieOI5?ohm!j9ez7ld83IFC-B&!2(Rbw*V z(}#%UMz?W@G(==swZnl#sE->kt7SxMY`b%+jCfkFlth91{r@{^eD)THa165Ko9v<9 z`(blJ0rrCt_%FMU7WG`LnIce`6+Na#Iamo(fIT~mKemWIJ6sSJvszw??Fp5!-PaoQ z_3gp9*2J<&WBfUp!Ck^G}2@U>-w{im}F<=>*j~Blloc{ zp1M%A4Dk=M|wR#=98YIMQVI(W~dq z;?cd!$wR&1c>u;}za$&zAdzO0m1JjJ(`8~&P@@?Zba_~t&TCv2=(Ph(377cUQk47$ z-nm-A1*j5I_ZWV@VBX*X5BY!ihpQh_1>FWfL1HAZZQ8>iBb>H9jquv1xH@8PAmqxE zvO;>GG#C?D-<5W~!?QU!HwMbljBo*qa1g3^Ze9Ve5cdRwGunqKe+4V{HFxSK_)g5A zcWNZUPbqUN+sWU7I~--TGNUUPY$=}Hx24hoR#X(B#ARE1i6TpQu-T0xJLy z;ug4?l<}uwi^_X}p&rdNM`F;~6`I4`Dn&$@47=&+lZ$PMQ0ocdZed6Gg@)})EJF`= z{NVhzV+NcmxQ&R1q;(Hg1Def=yIjg|*bnOwB`Bl?P;oGMvYm<#O)-k00DsEk zSUp9)&mM{+;B3?&Sr0%^>Vsw+A^Q{kZFC^83_k?p*NgwgVNyokh7B99pr2u~RtYtd z8YS$JmE(mVz&JX;yuJRj6)TK+8GvzTTec9?u%SAeLhu=7d5%GSLaLm-6v4ZA8f?6H zcVAtG14{8$y(F+Z#*J{WaL<{1tr`BuQP9AzC z3bI#QN(Hd!5zGBlSgflu4DY&HdiJB@8dk6vqSdxG)+uIaX3iYPW42 z+p!Hpr`npl1GTviXHEHxqD~)18q_pVIEC7L`6IW!*1wHr&DEnRIPdNhf!B-l)HHPzQ*ie8OjCbzm6@6T~LE zwEPOiOPI=7;ld|o<=H9b6(Z8CPWzT^d8CJAu7_Hd2-l^*KP>*cM#wg_k=ovq*Pgz% zrmJse)FW??2=3EMuU?G6m(( z6>DtUkd}|JD)LJcyM^g#&J(aY!GvHD)z;ovdCLDB*rxj?}QQ z!JY{=z&3$uS?k*dutR=}@B!zW+slE3_fpcG+~I(#N9;}f{7*GZ@1>lg8^NaV>kZR? 
zshGQ8bX!%_45qW7XK>J)hs6wcps>cXuOn;qhDNWiX6NzMQN;v{@^Op8W(Qv4qssm#|B+I!*rp?3HV&Z6l0SXq!y`VG(B7G>8LaA ziM=}u5%{FNmJe|+9=!K}O$)znuazO19p?vWeJu-SNJB{kSfgSsmM8)Crj`lAUNPV- z`USwQA1vvRPWxSpXJCuiTqPq(y>-g?AI>Av8HOM1ei2bAiGrV&Alh_R&|M>clRz7R zbQ{?C(1gkp6|*~&0FUP|ixugik4Xaiq?tDBz*Pt-&<D!9Qs*o~87^E7xHi<TZ_U?#6u@XLlpslF@QT zJ{&p2knps1{1mF`MVn%5jIHnr@wulqDPVV0PftU#-p|{l3ExDtj2MJOaqUP;A~)YzZ8f9`cYQ3g ztyU>T4PZtj1?{N+YSfX}w4!c)k_nblo~fCc59r&ep5)H!`sUUd!eW zmB-Te5(@fd9u#atdaU2OQ2WhM$mijmw(6WMqry=UDA!;>oO6}-gKO|*Vm36@#W{=q z1rXV%mzKcU95vQi0w$G_u?uZ9ca2M3gzN*io%=}6y1{RS*3J%+H(sp+Tc z$<$QwiZnW5GVWa}1oJyx5WVP(0DiHQHyRs$i!PqbU&- z9bV^=;8O!0f}}I-Sp%988e^ zj`p(=cMTGE!1cl1Ff0+@!As@5Y1D7HCd1b4EA#4?*HdSCRw%4j%PHuQ>p+ zQb!F#rYo!l7v^A|)MEc}P8rkMAHj>jp{?53P2|O5>9fzgt2b$pfG@16J72fIHcG?N z_-)8T3Xb;uMqaYs|M_6gUdv$~D6(Zito?g2&7Dht&yC|fUIF{8B%?Syj{vdK;VDIx zjjiiO&m;c$NHp0vo>>Ve>vbR-6-k&yK2QS`GE-G8J7`mS-I$0fQBZapA(9YwfUC6{ zP!KCphp=E4Qc)vo0>wm7pEs3j!EeZXqf=NdJS}Euzq&bWf`^Q{y2L?c9le)A;!E0# zk#_nGk{H75mhMbw_9qoUyJOc9PT(HewGR?pWHHG~&vfSa+?}aL`WJj~k!B3>k;mH> zY0XdD0IyrMZ;G^|Ac91^_fkD8iYQK;pTk0Pm03nybQu9(3l(Biaj$tb`t#}8SLp;Z zr6WD=$hDWCZqQ@N@Vf9Jv>oI_wNF}AzzR3MiRQi^)meLZ%~)HVYej)wwo;YVkabV# zETeBLIop?98JcX+_MkANsi_;4kNBWhN*rlgVHu$dZ=rp)hSEv5)6PpS)A>7kbe0v< zMX2D5$4(1LPa6B8*i+)G^uC809`G}W;V4I5+Vum^yTqecJG}u+C3wi^W(a*P1frAl z{^q##4kY4?5ukpPgG4!|E~bDohJXvO0rSpC4O8zvfH=Y62Pa*<)u6$}e>%I3fKthO zD@`YgK8|IrQaxcdB3*uY>Nub&9{x|MaB9lUOd8co|GK3GL|?a=K>*R+=SDJ`vimn+ z_y1VoViUp~__)ks^?NVhYq=@{xe(ZB5!DGr zvLHNW*r%})j|}{7SMJ2vrpkH8?nTO)*FIEPLBcF5(t-DDAe3(h6Gp?2etx*f2Y8$qS0uy%p&~m4|8k$!VJJoq+N5tF3 zk;d04vXcPvSZB`eLedDQnXBlV!CK+1i@a?OuY{jsbPq8w>e8eqWGyh!^;sp!Sm?>N z<~R?9N#`L!0)+$zoRSLzoQ(g}Q(}%jedeA>{`1X)?_s*}F$E3K@d&}tEF|v)7AQwO z79MzDCnc&Fyh^Q9&mKY4GbO#>rUaOhSQ<|QLg-^w_^liNR$z! 
z3|V;2TGd%bm+z<3vA7s&AzfM$!P>AI|{@!>5tVnQ#=4Y#Ah^33Zg{`HG5 zh0*5y3giA;w_x>sI_IzzvfQT|oLfKPRAbu~L!{HCd?+n*T0Ag}|9xlOI`%b@ff?ml zE7#rj7CdEbSGtvE@574Mj2wuSp0p}(Bk80)D6-g@u}3DVm}-im9eWgb~Xf#wIV5S>g&ofYrW%9NC2WB zO|T`04a!i2)HAkreY2VQl^%f3DoQBc)Kj%iwTCS38Uwo&j43X{82p1o;3hAZE&@Xk z3+525vOuyR{nAROB~;$Qon%AGmWf3@?9R1wyKGen^5Z|@Uwv@P4+cFI;5hvRC%y4m zgt|kQ90EY|oN*bLwT;4U877(`m8oW61!f`JWzHP+FG<&#{)%5%g0xtV^S>M0j0#E7 zlizqwtHB@Z>(o9v2NpARKb1wQYah({Cd2dctz!78sRT7FW zAk~jZpLy~Bf}A`ILC-+OYC^qDEg+iXo>QA3qhn&Xg2IlmOArO{YvLz#0O5!uhVoAU+1G?QSYWViDGZ0kWwdF_P8d?G#YbR!Cc1S8oNm%4;C|r z@_5W|Su-ZQM+S2w5{{h~VinY}3tWTiLUB?<%Zv)MUN1FRB&Wb3KOTmyJS;)=_1fJm zIPNQjq^2m=KAV~xQ)sFLTXQs#S>a@st%q~H!I3ae2f1{Ik!uA5+(Sn-Xs8aZhy+BjvzVFno6>OrF3XoLD7gHM-P{$# zm7?lC5X@ESK|K7ki2Ju z1<)b$J!}|OuT#F43g3)J9sNz+Jufq;6;|~1<5i|rcr(hwu%u$r#|(xo)Z(S7UzL~* zZ!-sooXoGfdkn!6k-5kIL5*e$Wd&Bxd@wkIHNEGWm%cH|$(k@)_YP?NmED5+Encga z`Q1>g8v?y}+ltonNdVGe!(Cwda~dAm8e5K(&-8(e0=FJddr{No^@D&>L!~&OR3Bj| zZWtrJ0d3h=x7O*7LiH{izS%xG=87x81;@d6@ez(Lhx`wDS9&pi=^-Gk1?_z-4H&hJok2!Y2nw>>O`4n>R z;{%5#=LipI8un^ecxrz3iuJ!83B>7Vka;|1HcOx0L?vi*goxw5udPvv3872g3{cPP znV7YZ$_jGe9ssFwEN=}JPEH_d0LK_0uE&%}LOau26=|m@%S8A4qt%87h;H(vIwg1x z4fNCw1jmJ?)ckhazSlxfDs>>9G_*;9G$C_O2&{FrC zxoOHngqMU)F!w?3IJ-DFmR5#1-Xcq`Hy)u9)(}kNa1y{0QS)LA`J)V{4pav*rBvQ5 z3SF@^#m+26(FA5EA*&FXlX9PT&3^}BD)k0VPK%9~LIPT@(A)hZ zaJaWBf!$|3eIZ*#CeE1|FRixIqF)JT8b><(wXWj!X&TP-<< zqJIidQG|Hgr;d%v^BH*4SM|afD!(p|0Tk8(1+Pa>#|fi_;;Qfn=+%6)BWdg#U;bFq zm9^|LQj($_PUS^mq@Be^JOXj|f6}ztF1{(w2qldcEWEI*sL-XH(kSn}bv7214S}iJ zO%IfHTymEnC={v-nEAQT1#xWo-7+OyhbAh1=Z05Re;fkQ9xuhujUQ51ayHd zGDj)+dsIi2n{HdRl;>ci296DcX0%l6r+EU{7r$F>>ZsK!?n&XFgrQ2TD^353Xjsd0 zC>K4Zr26Y}5<9Xj3{H&rr3|GEbWrpj0_=B2>ea>K_bx%gZ4oFk%S-nM8RldD;OTq# zL#XTKM~%%1@nj*gfS0ULRX`h_eIVLtp_US6P{kGP$)j-nsn771??bm94h^LO3_0UF z9+4lix*ajSB#SPfM=TaxCHuQ?kM;Ywt5J%Y>7vCM&KBgs{Z=d0Wejc11ErtE|x!slF>i3r@~7+I~4ZrukQK-Gf-b*&&3He4ack zvT)q1W}iy}tT}fLsl~d9q=AuH_TdjhK_pLrlgESqtMJF50WFZ0|544YGWumRCb;?r 
z6;8pK3GD*N4Le30qYF4o)o7e~yP02VF+O-=gmxjB$0h~XzL9lY=cD_nb0{B5#^AtZ zc}dDJ6FI=0R=C#4l)NG4vNA<9gFI0tK!=1Rqtab|`#>K6ce^PuQc@(jIFnc#%VpNE z{J20yb|*3jfp}4(POR~2U)-Y?$5a*`fEjrHwj{{%XfgnYZrsFY7Rx0}7t?^^Q^Dbz z#zN7ZkRArHtOnNg$AT~)Z6mK?8C_m#ZiSs4L%rA>%I5b3(h9e#Qf&hpY>1Zd;03<- z^nSR0>7i7>xLqQKA8-phh|gI_MHm&RoOkJQgbax?g;i?A9bj3M7MV@saO->iG5<+8 zlw0GGJ^~&fxkGhBKURT(TwRJ3wrngd0=ZcVBXd995`kPXe_qSL=ehyg*}-NsCA> z8I|rO3Zb6(ap>d>g;W&@7*fPO#h6VN1tUw5$Z8 z?jLTq%YFdu)z!AWu>dBLrz_25@V{2nl<^mgA#ZU%P=kxQ^`2Q>%~(?xlmyXMGn;S! zoj65j4N7(L2vbcF%n|yjegqKp>al9kN+!IiA_I~WJYRpHpA?nK=k?lHt>oDm(}uRL zV{_ARr+vg^GPJa7Dne0>sXUq-Vf;Y0Pk}hJq&E>Vu7`0;KNe%4mRTBZC0!l)rUM||~cKTtnT-r922Yq@iAMU_wEI!Mc=FL_tOoEWz5V%eq z8^`sFpd-{{2cmb%MKVGL^+-3<>8@D>fyeFx`CnBG3*p*Pm^b&DaDwO4(^KDNGz*#zyr+h|1hsG!k;Luv6(C8ww0x4&D}vBLj)aF8v>$DF-DR8V_LQ zCO^%B98(%t&Dt3g$|&9|;dMHyp>HV*Tt|NZ$)nKpI8yiB3u<5?r{R%5sOy<9|Jp($awI^yLC$=Qzl=*j01~i}<7pV_{Ou zNCKt3H7~$bD92a#2I&|8=xG!RVp1{99tvnEepp0Dr=5P_d*dl^C@<2{t;aE)hdFX& z7TLaL_G*ltc=^9-FA#;aAPyg=5rvt4o3X1I@x6Aw9FdY=KzPq5O?e@LJ&@o!5`j!S zn8Lm5UNd5;SS@fWi;Ua&g+Y0&Wkqumj}VSvPDcvNQt}c?qUTET0uyIKAt^hdeAFQtiRBbO;5?H**3%-t=W(-^>?n0@SB=_ue9N`l$HN-^q#}kZCWW`|n?(UC zgVf8BaYeLT<#{7RJGg3gHJ-}4lLpRe3LR=6-Ha57gR4>OQu*&P<&Hx283G+HVOVEk zhdo7|y8AcbAH&18zy%D4n%Jxn4P9g*B=cf1+QZk*bnAKTlZ62(L7X*#FvN6vnOMgD z0%<(_$zl=d0y-G$BD~4kLu%pY#vr)kho121+3O73&&oILAtzjox*7U%c zA<{@`)!Zs!7zpRA4ck4vVx)ec`TrxLIJ_NWY<9{9e%7yjbOEi3zO-)6*!+tO_Sd8# zT^Q0a%AF426C%ok|47OyQFqDo+C#?Zt$~a)r4(FDcNz>?KD!Y_7%H>x8n1lCP;Q`z zJZl$OT%WKiD`UOr+1BYvWJ%SOk*I1JY?xB7?Y-8W|DP#cc=ML&i!@VG>L_MkC_}9R z4M9DMyL@JnZiI1hY7ptgV1V>5e5YVM@IADHc04o18>xw`d3V^KDs!AcvjP5Su&gM> zuILNMmxqXvSktdAvhW5+Ksd3oP_NU~MSJpmu+wZEFCQq7^}~|6+tO zNcRfR;trwWVC7d*#fXNowRr7By<>ZlneW0c=MQv?@|Y_MFB=clH7sg}q{Q4|bhJXj z*@_6Oh9L($f`Gbns{d`RnP)iwZV<71Xq7E0B%r9iT$DOR&SHMQHMKGNS|0Mx>j`Xf z+TRClMLGJ~b=wKHu?up*W?Mmc=o$x9clm1urSD)-WIr9T->27WK|iEP$+*me;8gy!}7|rj@4HaA4{8ow-+~xnz3%Ke>Z4QCM%32A={| zvUo*xZ^T%ad<^Z;QH+TwVZ00A)R66slpEaQj-YWlHiILrlNk%5+~n{vo2=v`Rl{c> 
z!Ial?+wg-NVS#Ek3R;Gb%>nFF4hmg&-4v9s*05{`0x9-8JeJokSH5V{Q3oK0;mVf; zE>AEjkJVPOwfB0pCuMD9Wc!W)f*Nm5q;ZDX2of+Si@+LS4c!dKL0$zsrgLUV2bi^& z%Xif*3M9xuqyav3qKW|Z14#y)C8)HOL8G&j7w?W>s=pbD`b7K8XUZ{&OMzPrQs^gb zHhL|7Yx1-f2)iobn3Q(!Sd58uibnxPSU(GEO*h7>IO4B5VIVntfke&ryY9$eQ9hxh zK-r49WBiOY{?f`EvdP&T8A8IQW|YDC3j+zImQG6myVSkR{a4g#x! zUGXh3Rla3*{tgTcj^*K){pVzR$lKa*tPb~vI4ZDmO_UAZJ=_h9g%S)2>QtXRi9ZuTpZhq`(bsA(RnvpB!0IQB#f*7dCY zOa^RPL=8t4Bk+)DRcDGZL9evI3SWQ5zz$eq;Ro;t@|>UCxoR%Xv>Sm!v0PYLZWy9$ zG#xp~#jPi;Xr@qR-hnB*<#o=c;idW2>qcf|ZgM!=>QCX==uFD#CTI0d+}%xhWzdz^ z(*$LK>AF2OSyNr+wS6P^T&fm`EZz(;OJ6{OrPApevCaqnhMs#ty+ccWQUaT>J*Zn3 z4^^^&p=25CrE^r~=WRa0?<7umlE%Q$3=CZVV;kdfM*@QLrqby#n8ERl%0NXhsYXDv z_90!RFSXRdV0!5SvkTP~cw=5qd z1e<5%b}<7%!QV2Mac<|g&w*MUD`Nt*@gl`FI|`6IRbdzr$6l(HTvqH(Q(r@K?!^x! z>#SMREK$j${c_sV^K#8pK~f=Oio2xpV(i36j(HmmY=vMb2c(54>Kspr!H!Skj^!hB zRI>o16blhl$S&q8PBoRgerOFXP3&;t{eG??650P z8oX z<8?cR0*jl?L+ZwLLkWz&KK3~P?29oL#d8Mh*NuFOO_Uc4d)i#t1r(AN>1&)KhCvFM z)m^fd3EFbi_3C9LXr~?S#%D@|v8lDaFon%n?Wwv+*p+9yZ7(;hw9&()WH-bj#>!Nl zBtBHhynusE@X}+$!`V1PxfsG>vKF}q{_ORs>N(*z6uQ!&%%+GYB(}v#V@svc>zO^?_~qi~ZKWj0 ztyN4MA}`K)1cbQ>l)SKXhjAzqmq!|>jG{R}D-Gtyz@dg69+YTP_C}h3YoGu$YxpMo z|6COtauhM3Wlu#`kG?(f8OAp~##In&qw7u39se99P(_=$p9JST;s?pLE`XjE8FMK2 zD_{J8oRJcMQFO?khs{V&DNsYtaWF?jmzlY&lSfli`xt6iEw;L0kfo{F1WqYd9b!)T zQGfh*vkH*fs9{`MB^Jq0Oo29V8^)^bB}HEA(f8_yxIeb&MM6 zfO<<_C%RuUnEq?sfYjQXCU;g@VJ=-+D;QF5M)~cHJymxooaWGplT3Az8(FW?+_<-P zHXZ3~Q)at#Itps_s=W6maSV$hHsZ@Vo@99O;mV2*wYYK59VW05@FHb>^x$C?S1aG7 zVAbY_$YYYd^GfGX0{60X85FxEkx)@;ey7v(MR3PCJIH=fS)9it!a1kZp`=BA2NGxQR-bDa#GVo|f z`I`}C)d8meF@=XwS4fH|)V&)m5A-^kx46o8(K+f_yYg(A#RuIhSrUKij5oXIh?Wj3 zLm2_{8LHQ9RY9GMY=e_58CE8abqBC?dw@0hlUl~!>uH2RtsFM_Jy*Mv@4 zZ*EGI!;MMJdVf(K-9s;S21+-%UyrA=l>H&J4i4c4*zGn(e<`tNo`S~1CN$tiyzmBP zMs=Y+18-V_Son=$5@g8=GJW)R$`fJbSWg)dKlzoK=P?{Odi42WWsjVX?;vzdv&}y} zjSVc;6;k?S7$F?EUs13kIu1#UyKg@ic5Ck_ih=0~V)h!o95#~LH%x1tdl=xrIUDTr za_#MW8O2)HP`ntMeRacNa^-!#%Lw~%WSDVjz3r`f)C%43=#qf!6*kN{wn)a>n*9xJ 
zr4Xw6GdZ?Z6zMl!()fifImYr5ELNZy?h7lEDC;PUB&^&rbOulJi1Jo(UPG2#aS))D zRk2Z8bd5T#x$!dlrfkrZkxVsSJ8JjFW6oGj=tAJB2zof~7@aB?Kb03oYi&?wukf}J zepvzTw}g;aLMm+eK3RUYe2ro*tZY)Qn=eMf@w#Uch9YiCK$A!TMry4;G=b&#*rP!$ zZN+p})pA{HoTVZx?{|r&I_lAX=y%arRgQ;(+v0Fp5mO^VA}MJFYn$CN`A~vW?$Zm@ z3vNu1#}WJMMn%hYhhFz${K=GH5(%wC2V2Ha#@wGmOlpN*WZ}vgWTOSC=OOUVD%pl* z8>By*Mg-$o1M?N!rZ!9u*!o3;wk?VN#~5GeUAD4hkt!^k!JgPvWdyaXnMZls4$g`Q zXEIq>I~SUl0NNoaoUDv%#lIY`Ab#G7C}10+iD$RF#RsI^F@1FaNv!lCiJ?M`8@4|L z@}g!5`I)1Eu~&O_ga&RPkx1DXwDutlC>=vR{x?lYC7uT$`wQ3W*9W=0PUiv-ewd>ABhj zOXz@O36+7nO6lX)?2|NWZAm#&+S_NM>tGqLjg}vvcgS8xmSEO^2Lw+E!hPyMJagd2C8x-79SJ$OY|j%xC)3 zL=56c3bCNuzEc(sZA}GcL8tnelI_&Dy4X8LN%Hq?D9-s%4aCxB9eYWO+~@P{B;U5s83*a~V zgBU=|D@UEkjn}$NB!J3j?**Rl z`vD`-;qO}(xI)|VE%_4wv!wLBD%Me)1 za{3U&p(3cp0*2U%T+b`)6oxi*kvq(_!LHH3vmqbjy?IBjE$n=cD|wtaEP4wU!wj{U zRijEU3!&DbRQF<6A&GwiTETyf*XKwiqFu@Z;}DWQL4DSYZnH=2UnV2cAsXIRh?a(t z>aq)3J_c4$_&JDe`I@C(F-44xj>eK5&(G;1fI@~iAsZJuGeD0&yR*?B+lM_LA)v95 z08k88chYzz9pP0qNp<{azdaroP;0g)3~BwcU{#6nfiS462)f>_tQqJ~ptFlQ!`2~Q zi`_b-t~0fr=NKR@Yr556<-t4LSql099HG$wzsC(bhZ($?X{^l~ja{C?*{Nd}`0a;m z$VWrjk^|$u>N-&fP-OIGg_A=0@>f?(KH=^mo?0Xi-9lTKMdVhN`#u64(DTFYt-o(4 zmzCIF5%UfJ@tXo8=%9MzBMH@?n5am{7EK4}w$!Rqm=7Li!ggz-9HAVy4%*2f@ z@B-q#1;qgmW{tKQxV1PIElp|`I*VjzzOAX8C98(xKpW{$h_tqmP2|O4wCe4s>bCv@ zn|7ZzdsF$mU$34$YeN;F-f2E{maq2FUFn^F{H5clC36UxG%ROr;rvc{SoT*XtuN!7 z4n}my?6TuYxmp*fs@J2Qe@Y+#@h+P18?z>KxuKDMT48b~%Rax6ssu+01L!0#>RAu`abGTTd zjb$abMz#k;1R7NtnELXn?GlkxMf8p{cNmSzdfS8hXgyJgVuyfTuJRb2(Z{YVLm3>n z*S1YsK|}tY8mFYI=R*WwfL(JtP|J}4XX-29G^>k+jO)W*QWm5m5#TSDc!sYKw!i=j z45ztv)-J}njPraK$qVjv(&IdZcUHucyD|!v{#Yv;O1@^=PEWp)q>HAeR&$tZ;Xc0R zw@_*2+lr94z@*CvXRhNwn1ct=8OGlyY)46mq&Q5>BFj>86FhM;a;VvF&0#Mp8#g+2 z770%G*%>k4%j9t@RmEEO6F>w+)EgjpsI-*yDCOph7ATg*tC^0`9`oz{8x$w_`;Xa5 zkI*-n?c$}%LHP4!UKmXf2{wS&YM?a10E%fBQ7^wMdyx#u@>aPT`z#w933B{p; z2cQyffc^XLcCnr|8{G200T~Uz$}$L3g42pq*@yj}pALi%|Bdu5rES zJ+C)%D~BvEsU47jA8+R+l6HFDswLj8^s?b;p+^v}%2MWPu*w$@}u+! 
zAE~_h?Ay{40v%Z;LNY|tq7zV89I$EuDi3O?)elvOhmH~%@*7s5^F_#CEL5))rY?o2 zWDdCETDZPCkRI6!j#lE0AD&nI*XVjVA)`(J{ruFUp2qwQ?p!qk_PIp2e>}`}rR`g_NAr|1tvRTJkQbqZh2P(px2d|Vca9f@8j0ycbgzviYjf35`go>?J} zk!)z$Icn0mVIrQYXYc&__GQEP|%pPim*wIz83L(1LR^8ir+hBPg;-5jd@ zu4?9`iEiPo9+g;i$D(++QY+=Ln;$HFckuWueSSF5appc! zp}(dm`+E5OFa+Md-(h4o&@iK?U#EOzzwKQqzgmjfpcSH$z)C)3wZ=@2%Ot zf{Fxdwo&VJBmb6Q_?(`mNa;3UmuT<Ah6y)?RdqG_Kovxms6*A+vxEo=eJ{NPu5x!IdD%)jFh<3htj&aSXU?*D>ZHjaeMRC1zA;IQ(^$}*88lqtopZ{ZSkr1s!z#{Kq7>dO7{RR~gabx>OGh<5W};5F3*8JY zZ4^Nb@$T+)_WNzY)u5_boh8j?^!%TF7$`xuN5T}9VF6zWc@xuB>MiUEg)Io6CDqiW zQnALGXEmF%`u(!*R*#|{MYT1K!E4?@e;&E03bwNz(F(a7APx8{g}SgB-x&+`QmmR^ zPF-BCm&fKbCXk%UpZ=oOd&2$M{nWlX^M5r_;m(Jpy8C4~hm2txyyKbZL3_4*WAEPkXs4ZIT96eYL$;{w`V@4pfg*{!kLh>EeF3c1HLRO zQm%lV+T_VNzionldN`WCSP>`Q$mTy-<83bSqm##dC}FI2$bl23^M{!#0s0NUa?ECt z`jKtmmi^)3&9?J0{>vSr^_1}NTlh&DKc_EpTNTGY`ST($i2)#<|2bwQgWmkv`#s-d zf)$UP7-IG9k=dXlbEg4#r?)sVjFaOeM|HDwLLRbi<oPQiF0&Zh=>G?Fj~n}$9gKAHH{Jj$Fyo7(=C zhBHOJm0xK%0*rHMcjlcVKV7q6aNdgm3Ylo#AntJZ8*4&U(^$1Q%{G)}bcSFh5A^l% zsim)OWxE*UtJG)CR=JKaEWi%m0oqR<1#!Zb`PE$*G+{X+0|?Ygak~^2dWycwjZa*A zvuM3QzYq)v4Ls_}*d=0+r-PzC*OU%QMGt`#KoU#G>20PBR*F!TZ_@SOByJ?RblG@C zRjEaZzNeCC6s_d7^UrQcA+CO}&de9}fgp`!x9}h*7Pkw?W~fDiHt)Fry%>HxMo%gj z;eAYJ;_**Kio4yNKulSTnXGR)c#YKIDX;ebrt@t8Ebc!<)T?--h4DX$!D!V~xe(al zH(5uWWI4Ohy;#X~s#9jevMlPHib}91g6}}#; zO*_{9 z8?b>3HSMF4QkD3~3mwzPLflP!k?6uy8|Ji)Pml?Fu$=Qa@#BihIiDmxXTXBO@Vkv4 zzLb}Dv@cyvOeD@{=t*@Fb#|%KKd0BX%*cBJNE;KpW4)?v#(&clnS8J*_2k`ux;l-= zMuvePmssg$u;UTAJP}m3%37vZ<#@ds453Vy-%>!ZhhIqS#0^B$b0idX()^Y=RSJ(t z21!KA7i`Nyvcb+#L^Kh|#t5q~i7fHsbR++n&nX!&3VsoySX z3z{#BMnXfb0-qhVqBr-elmzBj&t?hvMrCV)vxlX?o@YtuZXfiWOYqJsRutv18bN!$!1jKfIDS~nM zm0$9A;v9A%-VoyAE(mbQ&>s1Gc$GrhJ$$D~R?unMk~7UYXU5JTuK$R~r0W4^mMJMO zNfEKDW2)^lvpQlvT8ZmR!Lwv@z0tP0<^7Z%d)j8FSPc^G!&3@(b{NCjufcBRWPV*#7mn@6kem(Mv z5Xm5C$V2W6s>T^SXih&o%f+pg`InTV>DWCfzD}7Q;aX93`VT^_rRS+$~WnAHyA)aFnSwjb^ zI_Q#^q)4a!xo}%s^bkUteCcG;keu2KQqSwL%OT%mu&q%3E;6YBUa<4_lx3=N#Z 
zM1L-6_VtAv1b_PjyQUiJW@r`!41Eqac;g1#af+CHQjy_!L4!K|>XWQL!fcA=xV;i; z;v8Il%OeQJTQhP1j-JIb!=OF7__45k{HUfSUUfoPwo*hn@vy1)6Ts#hah9Js)c4Da z8~k^IKzyHpWT~@kl`WSyuCzU3xk!&<$M*@hUy+}lyu6^=GD-^-E#lxUL7Y-Cpm8)w z3@L~>o#CDv+>6Zt+c=ATzG5&?FII{wRACZ-f3 z62Mdu0dE;m?U}T+KEFL5G4KNbm2!)-68g{o46P^DloM+0X~1(pG<-bnlQO!EUcQex zGjK3RJ!BwWe9`Fihj@bzJ*r&ZCWoP_&evnW7^eF5U*UL7{y6jT5aJ(on69*D&)ekt zfiCv6*jmw&&XWK-^=V03u1cS-%Thyj4OIK3piu>uKfj2lN+H>n3@&{P!aygkm{x9e zuDZW@h8GKX-WQ`@OZEsQEHjL} z25_Dfi81h>&7XtEf-TY|&LPi#AF%~Y!p%EXn?B5Ug8ytM$Jdxp^^UoTcv>~~x#*xN zLIN>RYO3SPrV46-+bxxZUwaQpnLuf%z>J1Co6Sz$e!PIV;r)2VH>%mrIK#$~^*?2< zc>4AAas8O5%l(vj#2(t*9&X>D-^ARZwp}kWnjxd550y1a4{x&OqlbPXtVd~^diz{( z(0m5GW$GhQ=6fNTiEasosE%Z@Pj>@Ib*2INa2w?X?MVK%9d5((vzIT$XYIr3pOu^2 zw>MiEPPI=L-5!&n4RAENg<~Cv%s%_-7?ucyqOvLe$p~xXEHa_$TsZzd+%W({S;xyn z*x}P@n~;VH-Dq#Kx`+M|~z3OF&qsmxI25FUFNzyn;D!rzrL}i&h2T zvloe4lNE`&*M3d|jGHSrGepo>DJ}yhNl`EACI>Klb>DQ-V2wIFXUQwm2?LSn6NmkaQ-ZEz=FFu`3i0Bxf zo(rW~O=&7llQ-I)C#pJt#w4};J;YdRHVdnfa{LpD>tFA^N-g6!F}uK8^7;gZWIeqk z_`^NjfjAJNc#i)bw+SLN37A?hZEl%~*knd=j&JfkE0qhtH#6tMfX6S+A@x#q;~VkQ2z~eN<6J%z{CHuk{AurR z#jctN>_;qdyE7Y+%xdICeh5UD-lE$u5VJeqRF8#u73*9JI;?*5p^jXsV!8pfb5WRj zYPV7^D4uJBepfLH5sD*#>>@gLct=hAz{UE|?wJYBn*#Dn!lcGwkF z#`<2p1>WQl5-F&=HG_IpgA33Zifk8kWxIffFdks8+QK%kO?MrSe#r>edJ$vN^2=nr+lyj^ z*N}LLUT`A8gVsAT+8@6`oIvAZ1Spogck?ChAVqZrA)SqY0P(%1JiijXIgmgT*5#{^ z=Q3k)8B6_VESnbQghZz$It-f2%mRJAH7@@0Bn}%Y(&{Xs?-eYNkD@1G9(NItP~le) zQE`q?+`v}sgkL^CAi%74%KZ=nB6)Rq3CY*31Cm#~76&;RQ;pr8EObqwhU$9EVkD^0Zvu994`Npc%a zhFgps(2grQJ||YttU0${FQ{91If+e@6fJu}`LePuA%7qu_5O~GIS$r{H))BpF41GP3BaX>PX;cs!Y3 z{=aKI7p1e(Q?pP<#La@ys(y3h8s<2k?bK++v)J{Orvx5uwyexNL#gD$_dL10XyF!9 z(ya=g)_Lr9TLmoAdUqyKDbD5e%mLx=*8$zA_IkFs&VJLTh;e#4dl>~%^b31QpSToh^&M@ zT&6XEZ{~b)eP-qcz6r2|TRNP5d~KmfCuAnMJM+Ci?PQb05!p#(bLYa)CR%!FkB*!F z_WCl9BPEB(dy(80Yu|cfDlstkRK7;UeAevbm5w57u}LX#A)kkHuq{5u3+AF$pfk+< zy$g_|4iK|rN{@X!s7W4@1v4^TcGLuqcD)-PZIuK*dB^gM*;xm){hYzao|Bcy9x2pU zCkVEjugmgEMl=Aria2|jo2V%bD&P}GiR}C@Kt84BG1htEda`9^(HQD#C%vxcsL=6? 
zmQqeH_ah%0Sdh`UvA^|@wiCCQw;|PcyqXndg%QtVg4UO(3>A7c$v<8-n@|=rxW3Gg zDxBX&{Qz=ceOeAK@-R^=GL@wExTN~Wh#3m(eBtr_ArsOt7c!rdb88u$>VNQb4Af5p zSFI>_-cZfBA6CUnY^d2(;TNX(C0qoXuLd&6Z8#3|Q^lbmYl(cYbQH>6SNZHEl_KxN zsFT3Z&wfF3CJw^`3T9!!Osop3(EcFiutBur3$AKytOd)emmI$wZwaR*AC#){knFq_us#%q zU`R0Pu6+MX%!^7|cd?a=8=A+hxRt0At7+3WAz8qa=Q802F?Do$AOmqLzW{OpHyujy zr9w6=2-fT4UE>2XwL7<F`99R(6dbQ0~GSpU&n{)0wqVD){O#V~N2L2d*E>0kn~k z-WnJQ`uaEc9!|2SO42d-5e(9IJ#+bDQc*a%+sKv*2gKeg3IFgkZIa+I4nY&w7UU5M zt|L-2BCy{ciDvgJW>CL<>0l+O0n>c^!M&)? z0Ppk2;(9+@u!_IR{QBlPt{4EUty6WMO7!i*GzYBK>2DGJNAYD}V98D`x5Dc2L`^qD zS?g=~Ljr$mVHKn4SV>W%_j*=I{p<7KCa(rC8Ovsjkt=code^(BQI{R#A~sT}{C3|K z%nmWrvr9b>T(yi%pma+N1>(#6_t1JmLJYJvfPb*d$ltAaJ^Wi|@#E>znUp+(0()WK z(B1GfuO_5W)~Hy2rX(qeJ1AjRxYauClqiBTveoGjiPO%CmES-zoA=lg0T2s;{#kq7|0!W=kBbW^QqACK4U_Kn{uchSA$29VRh9XpdYYXC9tf zPVfTaQcO5(+e?cNkWc8tdTsEHi9g4jSM)yyKMccRJDWS+2`!}69>r(F)s*-~)4?9) zZYH-r*@h9ofvZL&Kc5E;Z9p@~`M6r@ocEzBV^GB$doF~gzevGL>S1gv@CiBg)7xkl zSoMtL8-iU&5VTFFvx0s?a%OlS?_od7%nHL)96|QZalfXXk8(V`qNlim;21(}l z+fpFaY|dO81LHIneVLFE?41|3kQhN+aHeyjiE`D@7YGnkuI7hDDT@ECoSK44`|7dK z_!nHIF*0GpV`P1$M_<(F6mM6Ea%`i+`*8+_dIx5SA?Y`-MlinL|LTLgOJGBYULc#i7A zTT>OKz;Uj$1eSRz~1I=mFI9WbDyttv^8K0cCmObnT2f!Ug=LoJ;` zw+jS3^-efpB>C6g$}2>{%za#JxC4(AJF~gQ`v=L=TZM*~^Qqz5G&6AK!=) zw!FBdu6D4%UMBGmHe(nS_sg9Si}>IwjauvjDO5z|S0$f?&xVPCz1*_L;PpT6?eg*8 z>5nKTZEVVajj|H^-t5h(8PeM;+uc4Sy`^w-$&ix9^%ZlO?Cw*&R-@C2ip-v0}^ma02EO zv-7Pg0}b=Yo2e65wf{esJ@ff*^zT{G-ydn;u&=}0A1j6iW0|qY8`mUb>2dg9K#}iX zAEpJz*W2s{q(>#|)34LMKHv7Oir=rZi+_D{VC?*8WhM@$v{1otomvEY5ejflyR&;ww^mcw$ z5xSG0?_GL+p;>8Hw_;3s2OHW$GR92?98|;Pye9tGXUkd=G&Oku8Qgs3nlVp#M7YL7 zYDBqSzJVYXq4S=T=26qddUL|0@Wsl>DT*GyA+8K6DFO%EzIbY@WpJt!?^uE7+%4P= z5*|yAQ5p^q82W)M@Vzvs|GGgKA>O_Jge>)pX$X}+7jb($qwFsX3DL%ENWR2kk-g(| z>^JwzXqSWJ()nxr*Xf(pISqL2iOt>TMiejlM{VBPm@;1^8*8vQKhHDDaqFW=2s6G1r_HVu4W%0GWJq&vy23aTe$a2&NTE zvYxdTbc|)!2&lz_=6F!QF}DcLbwz`EQ?Z)40BHN$yabh~a3QUS+a=_GtW%l7Xl z&dXvl_1g0GeVe?NE_CqI^!o_LiiT^I8|J;k&HVix?1MTdZF#0McBMuh=O9)gkO 
z5kZnqu3n~~H>~%rY#J$W2~66idT|!^IC$dLsy(qau=QJrzF?SQ?smIvxS{NIGC_Y> zy6(k?>PFIXiz(i-T0Cvdi=n-}cAH7v$OcgXqi;O)5OH3be3+4~^kqRWm0`eC%{xz* z8hXCrt{N{<2M5t6DRD=NLmvcs;>x(Gt8MYaGSL&O4Qa=bFUYvml`qGBO$wC5Jo@I3 zdAjzZXv&i=P^{X}fVKC7KUB%2rmT1(2$JnN=K@8ncuj;RK0J&3Tt7i{>4#x6Xrv76#cCNf(g$PRE{i-C z9h3WkM*Q6q$LH6YYF2^P0wMw^fa4RcTQI8Pf4#UZbf@n-%YYFWOs;;B(a^K-SbF%Q z3io|I-0+}JMzo^&4$oOz0Ydy#jCwtBn;J{W0a9%EcwAWI*naD^=->E)fuxTyLDg`W zOQ(w;H~5^&J*rB&H-Jo_%?CUF7Da61u14(H^?N$Bx1Nh?0JRg2%M7ud5iQ~h5v>i^ zLeLPO)y;L;t4y@svyk$)Gc-2j~g1<{rl$2LXJa$I%s6S4>Zfip>P1@ACtKyIK zrMX{($_;(9Mzt$lfQ0FCxi~F6DC^Ytyu4Ra$drz%118&jU4IY}kLZ2f zSU9V+GgJElMm=g0{d3uu7R5J~R__m-a_dBT=}^GpEKy6zs?lone_Xch@*DVEeawGZ z)03--ke8sB5O_@q#Etvih$ElpSM^X46JR*lhVY|TY2Z|3428Fow0K+15+`CIG<@27 zKiIAjFX)Yk@(%?9YuD(R?>5}sW8;Y`@62lLQMCxFb~{J4>%aKpl?7CTFOIURg1?9^;0bC$}`YYH!a?G_P(VrLj~emO%30f(#y5$HY}FdKcBeffX~_ z{FA1gWRC|^^W*%^P1HfciK(Uv#kSh8lt`7Q1)0iWm!1Fj7=Q2nepnH(MmTh0&ONd_ zGt2&ctvkP!{=C+MHqCflpFrbW*B38&Evg9>1v>>puxn|77P$*;9WS@55Y4qAi3e&} zAKob|7W4Kkax2`$9cnYKZ?937>H=__c;jXomlTFzE2w1Js~XjacO!>rPFsgwvWIz6 z4AeRqDPFUT14>)3R~>Lo(utLD5aGhwCvY_Axm^TpTZMlgL&`&kV%P&M-pjfrMrWTiQO3uuc`*dT2YBRRdai5OD{{W7y%K(uDE^tW>i{r;Fy*VK6L9(fj@5 zGZ!A^LWzAIJcDoKW}$vGzEo_g$lGj6PF&Y)kaFvdY4Cec_6T^VRAxTcd1ev2LYmCF ztzUljwFN(Iz|-i+({-UQTz1r2SmjehP4M9`@oAHwJlLDnjs3~Lj0P%F7Qiy*O~F<; z%bHaJYZtnrk$G+B|W)NeL&Zuez(S&}uPGB6rpXmRsSK z-VcoIrzA{+go-T5m-k(Vv_ssBJqO(V0~3Y`%-~n%j=&E z9#ZAjILvp8N{iy+!@RP<*MClrVe2sZ{8xRjkc*Vmc znmco)3EFTnRwjIjvGvu4ZAJV{K+vps8U7RAWAFJ;BEEvSpGw2<*-96F*0@4Y{-54A z`@k0$rRI~Q81E0>kXJr}vucYvn60l6h8_K^Wz9BxQmJ@$CzU-PzLQu!z?AZW%D!Px zVXm6K|GcgbFR{q4bRQCV`C}Q#&LuKHnV#sC95*L#^O4s9v^a^qp+Wv%J4`yB$=BYp?&{ngdwf`+3hiagCMCZH7wCoLnOSOi z3tnhlV{5HgUgVnJyuT}msq=GDxi_VqW@2|kXb~iqLc8{a`Dw}F5UlIPDo`S#g8dN^ z%Tj(aVNbo@JFdGb-iNGXl>Jr$ZKjzRK&X1!db6YZaY5mvJI8n&g zwx*&@k-H3|#%q*~%aLFOa5Un=SMgC3@JxDRH##!k9FC7IFv=Pr}~gm&vP;AACAg zZtCRy`FBw3->&zhhxk3d%CofFYfa(WQ}T=mxp95i?Inb)n$7x=clW|RYcUx~5{dnZ zLCbN4IXMDWHVTVM(ernHinXj1{1%=RKX0^BR+8Ow8`D6jk#%yVqZA336kak*;j6#3 
zb*}YIaNkQS%y8kBgnd%~58@mJtj@kS)wgOJs37P~5Cc&fd4_FZaV-wPmq~h`)^fNX z=5uxO@!^(hjh%OP`13ZvA_2M6w|0tfu@st&t$qJK|H^Y8tF98D{7`n#h%k3kEsWrT zwa3G$RF58tuLro;!{;&NLh%(2Bc3IQ{{vGPhYdSEQ}w5RXA6uX@8{+CqZT}5*pEl- zxl2<+R!);0a9V`jV>SeqBXTpYpO zgI$LGz_`5d4nSBHCf1E9Qks%Ad?Kt5F*c*Q;NwOQE{@0xlG>&CoXB1NcT3U%v+!EW zULi0rkzyBSBacn})YE?GFtw@ud zqp5nW+J}rv?6?aXN+i-oNalbNkFY|qpdIxc51ZUgyZwExaV`7%#v0JPPsP=8pYj!> z>1m2m7)^q=$kP^0R+{|6I>5az?U+g~zW=v0Xnzp^7#qVRV#q&%>a@gmPa>PYzzn4J zY?BT-YlBu;GqFKp931oD;WZMX-E}QjOQb=ZE#$(ukPpM=elGqiB**;w=NF%CEW@~e zyKUy|kSlubj@c_pLBo6z8E?M;d(jJYAHKW-G|ZvyR+IGEEdVTfQ!j?G#B%Tg#E8%{ zJSMT(zXu#?UDp8@s6fG93$|3kbkJ#fI~gyZWB*E@SGV|S1)TQ!GpCQ!4zk>E2Vd@0 zb=RUY6%P2h$_53XQv*Mvd0Z;FdNAR2{l}S^s5q9gY02-2EU_cGE&%1X+7_=L9mAe5 zi95qL^ZXLxblX*;O}HtbXS6Tr95*y$Publ+p3M9U3WlQXJW=uDY(j#v^mwvA9<+>h zdL_~Hsvuld1!ElJIx@JI@g_lrR|ZY{V|9wgs1I;T9s2HytL>DE>U0$cuU=?G z98q9NmNKxcy>4Q=6>;oi;3pfKykbm$vk9o|S6i!jEz?VP;hu>3a*z4L3NqfqB-i$JQXV{4o7! zRu1tx!?pN`#u20SuCUW@o(Q$qD~~latEumD;zY&gaT>6+=(YT36znwX@Xvz9Gdm~S zz1TL!1@nIrB{iZmNOnormFV?X0(-16@f@ONNO>{Wk0UBRpywKuwDCo4x=Mb75&jIK z6mdB-#}9!^@yJ;cFEt%3ysRS$M+!cX|1N90AoZ1hxl%y-)??-j>#3rg_%S4* z)TF5#^r@jRxeGMYbsSDHf8rvhKQ(hIINK4))Iw1c?zY7_eRDNQSw?iaEex?t=R7R6 zl%3fE_v#Hoa@s}rmX+nM6^NH-Iq7m&L}AbGJ*mFJN-QXV=0a^5$@rpE+@?3y_-}*YgXzFo@*y{cM zxa*$XMQ0VsxYYge#1sy{Ufz}IbiIn>hS8XsNUpDxZgui#NTgALH<9{Qau5ihuH0GR})n_6O>LDHb+GDDU z5McWkgm~}F@szVANrAdhmZ%0&7S@#FYR436x;@8gfZR z7MP@?W~{V3BYr#_E|TT0h~2AF<-<$?A5QzL;BG(6%$`<*&zK z-HpC4BY|T7!67(GDr!7HB07X6SI2yxqDSMppFB{~4Q%!Rp1t7x3`Cy&EYB1JT6S&v zxy8!NNBPSgPYyhL@UeWErwHBzgDG?9=)qG?d_8jlQ{x!#eERl9xIq}g4wo#gyI6QJ zzSqX6)AZ)Yje^&g4ifcG1785K1pilTuR9`MW#Lm;AXl~)%RP#34-je2nDl-xr$~4X z@~CBNWgi?x;$x6N0?lK(X!Nz|%5HBn;E@0xE=bUwi?0PiM)5k)c2?n!*}t5v(q`G? 
zy{+lUco~nZ|AC36p13#KZ_-6E^)iUq4|n7a3nGPRPOB%c&9ahz_@aR!a_6Y8&ZKjB zQeKnX`neD+{kiK6#uUgiroCu`!qMUai(ntIUjbu?Njc-eJ?{R2ul*3e81Sk``(p9U z(0I$SNH6SuTz}<{vy!(xzBG+xxCevXf`oX|m9G?cwG^cN>x-X63T-I032?{;rQ0SG z-PZd<#bGA<>377A$1CpqMRNw;Q@3AjAdo>%r_}XQNaL5opb|F+7TRVvfRS1VmW?Mp z_MCb+#uWkg5E%Hv`Eu0&m2&?+qK7fV*`y)9AP&>jdY!wB3?1=vz@G~X3LNoqAzj?< zF0B4H1DttYGLqOuJ*|j=!1yD?;)_q}0ANDU;1ecxKJk{by_!R-~26uw-B> z>JPiPVFUc#Sheog`tAh9zKpbMdraM(?T8UorKyVpj~a@ew286>nNJq^6LmLFLy2~9 ztq@?)yRLIg3W1TbzrL{xKoL19EJocBK_5~)=y)prfXSy?8(mz1 z=KB*23Zz3OW)FgAHuVSUpBK%*~U zZy=RrEFzq*iJ^r-#AOy8f$4Xt+H+$6V9Y+@P1?hU!ddYIFXd2nyitFrhe9{`{ z{M{J%FUmahwF>b8&R0VYhj`lgU8@@XU&W9dvRAC65Wrv(#$tKxp3?r^K*6X%nAek7 ze?!Hat-Rcr#?gWI*Z3(bRUDw!+3UienvXp+V!rhm^Bm4x5DQQ}GPn*}5SR|O(A3%I zYy3r9$)g=c`9bpPZ}X~aGL03IsoWVXN0odM7}1_zT=-$SVGiPDVgLan&oB_!kDMxZ zX(+T@0UYM`&-8V1Y-&OsUh4r73QtcTrZuZ^xBY*(IAXjW?Yuo9@YdlyOGB{ri^E;( zZ@S;T$6pzV2ZOo07OxMTt#7*)zQXRji!pDdgCsM(n$kxUCZ(VLmd|>qXV18u%%A{B zno@d%bOGN2dP=!iV0S2f42A{pFMwFZ)yv#$l0SYz)0rp0?+L})7@fi&fK$7;8Tn76 zX~|?1gNV?`(Z)RlA1{FF)Eg<91g&@lYT^dK#b7Qz?GNx^iP}pd_4fU}cul5%%z^|p*h zRM0g$Bo);PC$*@-dL)z3kFx0>d+e?XZ+c{Jra>RKOjx$CRw zY+g?$v6QbEEWicu^;G|LxM{q6RK8o6EbwH52IBo{=R=nHB4w^nw#P4CIq7s zELM0uo%q>0s?}Fo{+qhm00&s81JX~<;H2b)57fSw*0^e5s` zlFZ*Z3lXS)8N!cHI_h<69Px{>MSqjFJ+ABDORB6J`!`5y*Y{4_LPqW*A`>w^AH+cyXFlsy4PJ9_Lj)^4Y)XaPb}pRr`5?(Y58&(prF5IklO zVoteBzi(b!y~V#N2TzrB52*&F|0N+Jgz()O6zGz-*tG4C{kx50dCHE@G@l)((Mm3YX| z+irjSt8k*g=t$N4Z^|N-?~~wUC!+|b1`pfLY?2`yB8_5hTUi;#fdmhILU(6%TRy0g zyspdmicOk|rxM5pOfy zjAcPbxzP&emi7*Epy&bf?&5S(HbfD`J5!=xbt68DvUF{JFPde>1dJ3iE;Bbo| zy!b+j{~)?5?DV%~zLlA-7H}%+s0!2!&5~~35+s>NN>eKLjL=jvMqjRI{^@h^wG|rP z$IiZ$6gc zTiiDFyJG3dpvZq4njj=RF{sVB8HZg$YY)7HbKaEERK!pLIm%u$TE3j5ztIgud8hF^ z7>-XJfnyvfLTm>m%Nx;O*4+vDiN=%`AtU-;Z`0epJVY|PnjjsLT*6*vTXD^5&!tRE)Bpw#E88CpwWF^A73v&9dFUPVUb)-b zs4%M}s<~6!Kp@e?5ptvw)KDIU2>|$zu}n^6i~UGIR3}K#>KvH&PLXWN3L-mux_`8ms9O<2z+<* zV;uAxbaNCraz5^d#@@f z?3X>q*?uaOuX`aAUs?j*VrBX*1ut$A{a)Wu6e%plq#Nnc$pCsgpSR0EQ{+ z9RAOS_jQXEKJx_ow 
zVg~bA%L=|-xQm=Q)OU6^j8n%v zW3acaILGV>`9=FUoqmMt?T(_@e;z&GuhYI3Q~Os#Z`ar%f&EDrqz>6HH41ZKVf0d@ zOuFKQ@+Hd{my`>g&$97I?ODrfEItl?e8?cnzs+WC!=TQ#F-AB7#w6I+`FH!xR2fQt z{H+I2PFW7_v15Mrwo~|dfT*FB&Yg}|{jvJrwQ?@u?_Vr@PU}8C-PD|y!mc-*XR8(; z#L4T;+G(ir?u4V4nvJttd6(oJE{9N)KcCcA6(qpz2SWHxP9KZ*C7 zR_)z^!u1!KtI+zH>Q`ZcyF2-D7*FJcm0}m$oq(G1fvHgM(^1~F9--T6{2n3J<6br8 zNFk?CaYrBGILzCh_xge2Ozzp5x3-l^*5&0kHnoRHn(KPjD(bT+&%SHA?(*QTpBVxm z7jsh-p|(kJQ;v(pfkwskIP1IocE{a@8!t*jW&4t=1FbrJP)c2^)lNdY_+*i7#$H?9 zmjyo{rZK`C6+oRR{??EoE+5Y${8#ZGBZYhBOt*rR@Gj{Ss*UNewQz^cKt#l>rrO4s zSiDg{HUOkX{a5&)NP7P?n=5B+U+Q(`YJBl8w*tudu2=te7yp!<2Ug6C=L0d5#F&;{ zxS6rS&JMeA6m$-ec4BPrFQFoxU%}UboK(rq70 zghpB7r#O+eT>QKj6Du)+uIlhg|*r z<5Z{l5RA$Sj=kdW{7q^d^v$HS_f=!sPV!hRz_KhGC}F*HAMc}UPuCs4g-aiY0dbX3 fGzDM=7gr*qib274iBaQBE@)I9snByr&Y|@H0f_Wm literal 0 HcmV?d00001 diff --git a/src/test/resources/test-macos-l0-lh6.lha b/src/test/resources/test-macos-l0-lh6.lha new file mode 100644 index 0000000000000000000000000000000000000000..123cd9d390b852b7d75b4ffa2084047983fb18a6 GIT binary patch literal 38089 zcmV(zK<2+OVJ&QEHZ7Hu006u)0sy3H4O<`p4{UF8Wo<2KaC3ESE_8Twii}kNBz2r< zq=EGT6aX!7ikiB~ERs6&J^TTdh$*EpS(uPeEAeQv*3R`27c}UZi1kcE`{S$k{&SrF z{`KA_ed@YMF$-22!>yKJEE7r%4K%P>t8xH-{r9_A+iiaD{QCQI_43c@+fQFEU41(H zd3MjgmR`L3@bcr=r*3?E^Y5p(mRz&?XWNc_Jp8%(XW#Jh|I7V7NZ!b=Le-AH~o}QU{`DLdb{e8H3 zdS~aSre0pXeLXYn(k5W z`Q`1;zqVTZXP&+n+h3;V)61ToUHSTYX|I6)47B#w^q1f1?b9zT_5FQ4wf|qPUfJva z-rt{IoNf7^`1j9W7yg>~#C~T#K7G68umAgN=s8ccejZ=p?QiDpyzBDx%cm^$;}7ZW zuMg+1j`?f(IqTq_zfCptJiRmhG}q>NYxCcSm*djj10IR5<9=V9e-C{6anF{WJNb0y z#CbNpihpD9^8X&T{$bRy?OZ3xvV7rh+u>YMubXSw_Vt+a1=|bD_4Dt`pSM};uGx9# z-@U(Ja&JCZKrcURzPa+t@bdZhx4r}|%iE5hhwlB~yXB`&E0T5f{`~#9YvpI^ZQgqS zch^4obMo}-`E~ZU(#OCrTetlkD_>>9$`GrMUvH;BYa!tHE*kz%r`JB2dwFVzJ?-$C z_?PLgqWN{@pI5x`+f2DxOuc#A-#oI{`-&d@ zJ+t-BK6QOh;pO~)Ji9`dr6=GL|2|1brGI#NYNDP{nbYIYjJyXxmB8huW1}_rLD8D` zO~1a#tNHWk%iEu}zKdl5l^|VRWPY+L6}{J@e!TxY_}httA3u5h2>psp9eY=>V8*?n z-#9_2>z`bqHVX(f@jH0=`)ztlpQNRjMJLUTh<5p5&GG1Cv~~a7`e&9BpH;Zm{zo@Y 
zl*-D=^>y~+9LSa3G{QL@cr^JRPJJ@Bq6Tg2^maxsUtYYr$Hca*1dWctw?KqtJ&|rc zDC5G(ocNz#Q}vc4Yr-0!w!crRUh{?x!XX~#?XQyRf&K8F`tbI?NvEgNPWfy3O)IiN9xQB!&S?K}S@#@;Qqq#w&$d9YP4=?4XZ>FhTtA2T(lCvpp-n()%GUm@SmNjeG zn)KOk7CA4|NcMXq6Jrg!ztOb;p&3(6e0_gc{NySK-$1h9gw??_=T3$`xr*OctXhee(Sd}r}P+2;Z!mmt^9qm=i6R(m@*}&I`ocz(^Qhx zk6O8zMyWsF98;?8FCR$KUBs9bT+>d=X)VsryxCTvX0_tmuV+3%lMmwdhDi6cH}46c z9U-^Y@DLkiP^-)cd;?c4%d)Zh|EzSwf9UJTXc2SfsC~2;N3k_w+IKa%-d=kT0sfry z^Uq%h=b?ao$dDPWWFGp*E>nqY!RsuhzyAp9E)y?q9!IZMM1NYRgYS-RDm{IY zlVwQe)>;~Y!X$s=+`_PW_TGE^eY)BrktuS3&^tp6V3Q z{RIxO;86a(JIs3r@F~Gs^EU65{PO&@>fL;&*RQwbop;8)ULcsXhrG}DeWPVPo_h(z zZ`v0XLd|Af27H!TwRI`1Jo4YDn0^vTMa8Q}|Acu4td;cDNkheA51MF~;}e3Jal`C* zM}nEAkInFxwip%l{fKHRaz|@+P=Khs_91&Pf=A^z1Wyu{U)#{v?J3{&Syu^Z?4jpP z@r%+zpQVF}@IFE6R${L9v%}@=wr&o~Z&F-$NkcN6{10WOx13rtH_r`0-p|fEM9% zF72lG98hA?;G;g$+`H!EAW&78$EfZM##fmVC!2)px62v|@5Hs0gmL}9!W(Zsq*1s_ zp|3XCgE104}?O_mcxZKDqL=9QtQY&PL2~Aop=1^FERCnrU`NdO{5<7StWt1S(-xADs(RjH4hN}RnwMDI$t zR8r6=YWYqH@XfvBs_E=-j%C|?YP{8v=Y^FbXxRuj6l6JJ)@F-AfIU!ThY*=*TPsvi zs#ngH;n3m#&tC`;HChx+e_y{fHZ8cdcmYD0U;8>XA!#z+*)z|q_jFSPEbsLfhD7Os z=Gc7wyZY_0Ysuk+tKuAOZY5^MWb@}L97zeJ)uIb!z@->@#on?a^D?j*L4tEai&`N; zaHvvx&04UViVZoRv8i!6y9k~^TN_pizihEvE6eAI>9eb`0!_2~X zY6nG}ig)qx$VpY}$xAJCQr-2tWo3^xV>C__`0JTv1-WyarwU8-vxzYTvM_@QSHvkH zk9l$bTWJj`$FdHzy^%<4G|2bHW_)o8iIObYWSuVRqGm@eaX=p z`t5A~+~*RFW!`9xMJNf82S|i=B98y0HmA^6w%O?>3aN5so^vdu6nxrCzhtt)gplD; zciD!+x8kKNwkMmu0eP>=$3og!90I&N2<)l0TBTczmHz(LFb&z`P_&~ln!*~xioI*_ zBcd#z$*oSDxrbeUIi>q=+A}y6OiZE@B#%;vF4K}%=fZ$(H*HrcN*F1^8ss|p3Hy4b z0Zkmg#p&Ea-ceT*3Wj)_`V$F7o)Le75HZFOK&NoxnthePzIe|xq}H=g#of812^cFT z(z<2kx#=kUbk4o*KmC1CrO>{A_1siz2~Jl+}Cd&_6+>V^mZFS2zl94E~>hl)qeyL4Mh zlBsp7v3y$^!^_fHxkc*KgGi9d(K41GF|QnmVGn1*RF2{+l@=7{5%CDgma=akHqqIi z3zF^L!nOQHsJ@NHf5vck8^TQS&#>COXa1EL#}rju;~aRVg8yD{NTIz(KG^#~0mlbR z(O`)0MuUpHbN>__y4u8}jo#*6D280go_=h|9^gTp8USXle z2U(ar;i<2YDc(>c>X|d#tqj6364Zr-5X3ZF9Op2Jo2j?L)29XscXm#q$*X+DenshL zC=E?NW94!6^7{IrZhg;H@J%~x3E-tvhIYFkir9E?4EHcW6N!2xXVW=@GsHTS(Nk8q 
z%~x1CGj}3tH7^E>2ln8FfLyEm_s4<6QnaxZBHQKg)(#3VVQ$r2DtF2U*je?{C>e{p zM)kOGq&}GXq|AkHZ=COxR&+t4QeeM@&{Bi|16(OV0vpGMi~?DpdWBy>TN z+wp1Fwd&`QD{JimjNB5li@1U9WhH%g!rZf-mLCN_X7kb>D}C`TMo&wR15FS%FOAX0 zfhB-1k!{w+#utSaRcL>ThGf5YC~9<2?^i)O7!=%BbmBR})ZvHJF5nhmdYl9}s^)&9 z5w~qc?L^5JuA>U77(H1t9OR`iWXq(}@Lq1ph=SAEgF_yY= zH`KyWGdp7tiE{CRshR|>`Zy%ZQeIu4Qx?9)q3*pkbU`aRt`90PyCduPGM|%<#ZWc2 zeg-XfcpZ`P-z<1xo?71yr9pR%O}ZHW*`dR$_DrYM#t_K>;-2bR&GCptKrta?aH7cW zm`h$o<&l7Ya_^avJi)Y+c-c~y{S|Wz0hWvpGAi>f`cx$9qKNCXrMHOPmX(@ zRH5eL+&gw`xe2rS(3AF*4XFvCOy>Zs10C`Xa^qB*$`d9vHDDMJOQ45M z2&H3<)CUV1^r|gYT~wv1lg55UjqoEPs*?(cQX4^}aXAE$= zp?D9^=UHQOIe_)3N?Yo32d-(B?dG&jTCjn zd}`~I$8sdqzYFCx3{zTO)Xu*CiWM&56~Uw+yvVPlbQGIt)5-N(|*$LTKg2oID}j zzX_&9DqYi8ma*Ii6E2I}P=e83yBJdPMOopvs||{@Jcf>LFyR_KNAT z@0_Z<8lhht-e6gZKqajP7TaL_IfK>SafHU6n~sWcr9{@^!1a}lddUt zi6tuPOSX~H^8dx03we5=fE{GKXvkV>|6eQU*j1mssy~}#vah&3O*$)(&If?Kb!ofF zO}dk&V!Mp!HV7TG@AtcX6HqH}LK)V)DU4#Q7O-x)f~`Q6L|Yj_Ersh%7|1R#W#iTl zsKbp9aE$DTZRAicIlPjv*5^E6gGJC+85T{E0z+*W|nB7EyZYfEu#vUt85}A^v*vZL@@ih3=n}WJ&gjbK)Z|oO%5_ z>aO3Nxl1m6T>~rX&Zs{$_|i)dSC|;e4VHAx4Hlzwf1I=M_bqi7)aoXPeW03Wp}o9k zpZ*{Bja^;7mVk_?4>zUS?E5|t+cCPL+MXaVz}Cw+$49M-S;3>cOtCL8d;xN!m~=wY zAr{A6UfBU~8ndAKNV2*YN-i`O?HN6+rv%F886u|}tO?QT5H6wU3OOxlVD z3}LaIDEWcUck(s9C=hd+Hms<+_kY_7Hf#28i`h-t^~@{gGlI>_YV?SI!lO2LeA^-o zT8|etlX2tO&C)wox?i~&dqa%EcUCE)garI`lA`Y=HD7Ag6>YA>xz~Z#F)-5yBgt>4 z9_3nd(#C(Bd-8K}_IAdEs`J~8wW2F%T=>I`(>jak?CO15KO2FPvG|1DL zK)0Elf*fxyges}bERseYf&FSX+RNMzuAm5Bs=Su;J_aP0p^CCS!ErLT>5Xj($S;VA z-JrUSRY2S+O6fbglY+>sjhnv1GyB-(ikG~^sIA$@Ksdm{6Z&MBn}u84eUCg5$9uSf zSK4#A9+Fb+Nc@?BNnn!34IE*T#A;J5R~MB}((=D;upsslt5Pzcqq4Bgl=lg{%kj}` zb;E!#0!pp~)_<|kH7cdOSK<7lECO{VeHJ_BimTgi3*TPK8%8ZAo!GrENcML$*a@TJ zSeVb9;r|N)eMGr{VA{QKdD%>d=%iG7j%uuzz7dVo8mTc z{j!vf)gg*o9^9xiZzZk^4;Zz?2{vVLN{LBO3c4DhYbiP`qP%3)I6J21k%s6~?o9(e8K-^f zxT?dh?C-tTL3+5wD-|DL0xY(?Aso_xw+B_AF^FSW5Tj5coS)6vQ>?%PlfYdtnlQ1` zW}``ve}wLKE*P`kUdahru(!#b7fgDV^>TS>%t#lEc(Gp=-mLExUt!;0?|IUH*2;*| 
z=yH2+o<+VJ#rR%Uj&dsEEGDbh)mIrD$^(q13tXXmaM~++i^$!IcOrrHxIkc@{yi!n zu*{oTI(pkeoXL#ZyL>csrn*bbE(VGr-FtYEfWvPnP7s+36g*SMbN(TJ?)QU=D=8T` zW9NQ4ssaYuPvxy?V2g!N#Now*am2~WqY5bmvraRL*@jE$IF0bmFMRXY2E9HA=U`@Q zxQDv`s7_tww4Kj0_6x}vY|(;!qNm?kDQ)b)Kn{4#Li0JbLncC>sOU2-0tm=?Eta2v zIg1#sr4Kewp0mwBW#XE(PM1Jcr+hxwqB)Jji9^G79fPvl432qMR$>f)xdmtQceO-< z9v$p5NsT6Vh97K5+s(JmXS-B390Ku1EHU#TbtyH=h4zEJVsQan~TT$z@6?CvOxG!&q#qlFse1a0+~wK)eEQ zhiC2FU^~`we!VYQrN%257wMQs=TKzpi`!~cpCk>umqI!VEa|c zI0f6Xv0xPK?$@x|f8Tfn#?W;I#&1UwGpg_M7XZor`f9F1ZuObpdemI|UK$sjKMY9} zS`GBC8cQL^GOUXGLTR&}=~yTKa3`}*=5)i3*O#s{f`9mi-pYeO;HnNKfb`}NQ)CfF zcB#Dd0bVodu51C8qG9?>FFJa*Vt{09^@Qkk49AL~b5JqcA+>LgKUWZg%));j0!zoo`L%B7jn$w12L;we>Ckks%Z%!&`DRWn*J zBvx=`;e)p5(wK3__mogSfOKY^_T2=Wo(1{!Kf*;u`xB&uz#fGJ*G}o%(S=z}q`bAd7eVUM(C_Hh{Aji7VTyXIXR*z%rO*(zd7)27%slgsb-ibhaFV z!nZ84Ga{S}eHgHLe7U6wG=s#qP~YsaW?;b@kd6e?qMB-!%;AY%pA^*&J^>iYF|!?Z^g3#oB1CSAh7Nv|5Ah5|IVTuE#C;u+F;NC$@lz?1a)`65eyBvwAM)C49C< z^-b?hRzzgX%tre&sf$p-r%hZ);3*pY_eUkztY?(11mt?C9B3?OfhJ7#` z8j%!Vl#eg<&FjfaD($jGW%Y?QY6il}(;PPIc^g2n*Jv%g4xcp*rl;d9`_WMnUDvyn z;=e+*%!Rf8mpKCM80}feUOK?5nd2hDIJNE)C5es2@b~o>PD;Z}tEcKWy=D{8Pm^0;RrvNnm9cd;aFmaCv6dN zL8d?p?IE{T=Fyf#aD)9FU`7;qQr8@~k~`kiOwcdJymaAZPNDnVtJo4wEj}n;*gWkTG4uiLk?A2Uy$bEV!YO(q3Glw_t&A@ z<6FieyAYV5SrW3mE86608JkmVH zWJMHNCP(XhDOW7=p&8(X#`#Nlnq)vR@nt#aU;GEf9PzDrqOo$ZNqh^GAyTJv{A&); z5#~1GY5B`=%044(97d|bu=r<7!9K8QALwy3ioTey&}*1h%nuhW#LWc+#mQ9<$EGp% zlvHNjuOD)J-R$5P!jqLJq8OaESO-@LbH<@Ffv_~e`qvg;BwiPr39}E5NgrBtXLag3 z&^sW}yg!$RgySCCa~L#GrKGo-7Oq!Muw81{U5M;hJsfh}V|1J~f$gtMfyAE@2YtnK zlcDHimWfCeS6k#M=3r;Ow6*-Kd?8R5#Zl&^?l@c#yekmTe1oE@K2Gd3_2F_j{K zD1Cm&b#Ujy;DD*}pcDATDZga^;JE_}E0a0qQD@naPOq$*v}(}z&%WXiW@_CSlHu=| z%Zseufap4}(i_2~T+UF=|33s!%dd>HHJ>Xn^Q!cCYn)4~8sUW+6f8azcecd47@vV#B(xR<46OyQ zugzM(@cj9!00$&^?|mMAd1Y}I7?kY25{bcO2`4xLH|&pG!!l(XNHs043b>z z-SMz7;}x;l4FTG6h|0LIiZ6{smawZaN#$a%EikV40&_mUSbQGD7kD{7u*y{w>;$@q zoInPU?qaxc@b#h=F*t6!avp;BN$C} zd5$2uV&PWc2f{8S7yTTeJ15@82ryn(ub<0{LcYrn9qaq!F{#GGp3FYJX1C+R?U7D? 
zFC3;xsxe-aVOlcwc*r3Yl%6!0)S${jlWxC}I-NA95Ov_n9%z0{M*nPN)JSApsYC5! zN|yU%%JWoIweMA1$ywKd&X>sW!GO_+y7J=)#I#GZVblx7=F^dBlxp83$pTb=$H-mV6DO(zo4Z$ay2{ z&=`dbk^Begzn?TDWAQOtP27suG@4-0B=K*fQGT|Q*gP-^vcpY=3GvQVad|0UP8ea3 zP=n(4rc~8f(xDEHOtD~iH#>>-_y>Quy*|9YiPQ;QS;&RuhwzP5t2+h9YdEY+jA%RF zE+qQRblrx!+?a+bRXW)v}1tT5a`Kn!Ki-&9cfC0NuCt|S{# zd!|in99;ADCeWtD^0Q}xcKWW&C!e;QK^V$DB2@pL+YAX?Y&x@9Jx$}mo(lsj0QV3b zG_!A;Sf_HhYI(G2H8<>*h3AFbsLcPLs}&s`eZaO?8#Z{mb8uD@&}e{sDaP`Q>Gt+BFK>V+ypops-@D=7SAMWsC9CVe=hv zp?g$c#5(QRz4+LMEJU)Ep{N^hktAnsMbzFJYPTC4 z5L7Xt22hxJk}(0Z%K7=GUo2q^2}|{YD13HkH(FPnD@0h|Mjpa@W=?#4mwRz9E*_t} ztu7|DGmkrSf)#9kpUZNU9jum1=fjm8ZYTvnbDLvH~|U&Y+`5Z zr)a3F%Y2b7^gnIS%Q}*4I(X9<5I!I^)#9kfosCG^B-m4nCLpB+3eC$3f=C*)RiOT} zK8+dET%kb`5W>G`?sM-18x5QkJkbnqhJDcOVEqV=9Z!l9ztrZ4V=w zkTGs<$vknKhG9SWmB`g#R|QKm4taf09V{!Wj8SNcfHYB>sH!7}m{5cmk*!&Fon3at zsr;c52k`Oy@_KP5y+#>hO)of+TL(y=4REVW|lIL#Ad%(mp z+oobsnY>wi9%b+De;zMeqfQ>>4#FZrExXA5$R-yFB`?--X@c=l9f(B@E_;r*h9gt0 zGL%SsSow2#_v=SF^zMF6buWJ!u$Q~%R>>>d(EVP}A`xz|&qcOyC=%aHb~}k;PQEo04IL&uvHj_r@aPgdGqZY&=R6Et_Q-BYM zPl7Pa$xvt8dP{?=zdWC{J261-*FA(+#d9mVq!sSw{2x9w?>KZV3$GxgM=J>3Gqjb@1E0Y940_A+h zEv+w(Slosm>(rw%aF@b-4n~ebNu(hya~$d46-u8?okdA3cGr29xb>ODB*_$<<1IZpOE?8_CSsCoCs;?T@kfNm$AiL_^E^{fnrA;+RyR7Z7f zRz+hpxp1gd-#CvDZc03TYd9UrnxIKE1M6qhd#v z+?TFKRL&KXE2ed^slV$z6Zexd9f9EZyx{O+LDWCMFja|XxP|n^RNC|@gKvz9;Gj-& z{56J^>b;Mob_a0dTh<(2pNc&W{NR-@y;DFF{O_BlhNtC(q8BsdKB`mm(h)43M+`W`XP>=8;P>Ut)s?697o5i!x9!id^ zoUbt51qC+u{jF4!nstOl-n+VdvNJ_`tPh2v?3eD7()b?R(M(()eeFd?rs86eO;)X= zrFL300Uu|zea4e~sIlIUSEw#T@^FRO{P>aE!`7&?weE13=IF`L)vA6GWWTr_ihjH7 z_ZXGxRp2A`$vrI+@D?Ky=X=5S#@>QQ!}Pwyb9**|649`Js`5J%q%?)x zAZHCXIb11q-+K=@3X~$YytsSrTe+>&RAe`SU;(o}J)x#S4VMdpmS1(9$l2F~*v3fV zvm6Ysd{BZiI4QcC&WzxZ(rYz3j<_0*X)NI);{pWAmh_6m#x!qE6cmws&%giBpmm(n zazm=iK`Mn&YfvB{B<)$I>?1BfQm~zkA?pK5yQ8YEpwGtMf&vwbJ$ErkWy;-5ch#() zxM);$q}|-bfT^SQM1Qf~Y)HXmpp1@Kz{qy+<&DjMQFPZ4ma{bjCf0OgVa_|5p^osp ztz%gymw`-$SJ$Hk7ofnIH(N}`TZ9RXQuhkOBx1ysb@c+GjL_qO-o32RyFvZdU3{i2B;RVv@4(ar<)OwthYtfJu;4Un 
zGSXb(&s^`~J#rq@t#-#M*(;?Da21LL1Ez+=dsf>wCKHpMu&BTfLz)!=FS)|J@n?Fr zXg1v9RL<&0xzLa_WAfW8xM3(zUx&a`x{0uZr|p{N`(kSYq@tkbZ7w;jjed*MlZYr% zC(MivP8-n*?citcat`B;nO_?8Q&VK?B@;zvlHSI{0xjM1C8P)G`{s>AC4&GtyDvru zqzG*{0|ZrAs}B^Ax)v*~YH|b0$9YatGt$@#WQtVC(ZDSzz7Iy#c@j4_BZ;oG-gCFC z6GgDq_CN}!AEn|X6Z&L$&`pVq_*1XcC`_Cl#A&nAZmHU~jwwV@zzKqJcPjRVBq(JS zHm)mA=ur)wwc^&uwno4N)}Hc>iWRgW-Q6u#6R!tv8uplOyADpq=g7adt(l}FjPT1l z3_6~JRB&c9<102hF5-q#1#D+)#wKXMwYUkV3}6!=kSf79p|z4nyUIjtU7?$Rk$mq| ziPBv0c}IMhQdrWmo2Ik9U_x^z6ZL^;=;!6$iU6sd!sCSoZZ>A1IW@7DQk+@FSOA`> zgO%%8h^iJ&o4;(P!B=pK(6*hvP_s8fNrEN)jMSuy5}PLT zm(3_y+<>QxMHBJhGROg!qgj?=f5mShreI6O|N7Bn@utAwHO6BuFebOfUidPUzp@Rd zU9J}AbL(ukmm3LF>*a#UtFDIA)k)#G-?Jm(8*O?i?2a_-=KeZLR0cWVBfx>hS;6ZO z@I>P?nlc*wGd?70yoe9+IC^yxz?T0VFMP%=Lz@-az{0~_V1*Uqnt#3pao^3+ZmAf0 zSsnDqLJNle(U+`Qo%X6EHjd}iGqXBEO;M=BE$%vIMc1C$NR)w@!8CdyW18VBCipXJ zPWyw{ttL&gOzs8G&FoG}NJX(!45#PLpbR`@u}H;f(Y6qicLbnvexdL+QZmOCMwA^5 zCjmqMaTl8(Hp-$BNB zIjTWPcF03HHoRZ9J={S8oKI~+sTR>IvqKk3OM9Ddg30Z#NW5@JvId7ijB$%JHmB=& zE77YW-;Ok+2F2_=zJ~q9Tv$Olxj%bCa%-C+b~qL|y<)5NM#UEDYxDE_lAT8x%}CDw zaF&u&bb@crkD~Y(wu7hGF)Fi#))7n2FG=vfRfp+@X3pmvs1;jfAlvz?`bDVPPO8ne zLnKunyo3@c7*>;OqjFjQWCci}hbwK5Qgg}1An@In8p?;$!%Hq*=nwgs_FUr0|z_6?2H-AyY*0L+Ix}R(9l@6K%H{Pj70whDX zP#k{pL+fn&f)_xR!a*I^hNy(^L!Rp@exZXX0VQ>5-{pM=yg?=#tszg5i;99h;l;mm zt~nPiy=y$U?m8z}aI-@)jKA@Z6=ha(ziC=xPdrdcJ-B+L{S;9?6KeQ9yv{&KD&(v} zxdHyMg33~aQEYesAnGlNahIT@^J|&7t7R}VSY^s-u0B?Wu({~q`}}%syQ9Y{XqAJwr(vX zYvE_oYe&U|W&QHsc|MBz;v+s1(!Bg%VNv51YNe?|KI%fho6^sgB$)TV;(klh50g1* zs64yH#oBi_Qk>ywtNF2 zj*gw=<}h`&88h#9bW;%={CY~T8pDSo`NSsj&F)(F!e^j4N3N7mU8^Is_gQ0>ST6#M z0D(pn>L`fE+!{A0?7So^2fiw7$)?Gqs?F|@=)8AWhQ#p0cj8Z5>`B9(Z zufBT>Pt?bt?E*3)oFP4_m6xT{K}C!wOkzz|)l&$H*unB-(=r)qi!XpEuY9XzP8`5@ z9Xj#f?B%{do!+NK79jNaKY+bDt)zdS*P-Nya5>zwbw8fS7}Efeu2UleTZ(XK)x43_ z2ZhONQDv>@8_L#)z7I>q7Z{qeW5-|bDZ?R4=hQ|seXC&UrI!&2$0xfgK}BO?f>R7A z<&?9S$IrGi=|ln9(0rV>xZ}1=B$PN~OjH5^VUNl>sI1m9OBmwal7z&00z}?@J;K(_ zYpostzBE`VfLE|d6inD7!b*Y<-mx-DO3t&wx=9+6XHEoSjT$mwateoNc&qUDFG%nP 
zmhh(S&L=6-ZRnU8`TAfJ%>)ty580x`Ue0&=J>NlEVxh*^&VN^lV5X$xI}=F0l?2-k zR1LvCV^ED%q?53lYZ=%j1Xrw-$*GaiXEDJjk<$7+G*L$~57CpL!X@)r=z}lf5go9S zZ-5N8qnS_ohbr%nfe34Ju>2!wGU$nI$7wC}-HeK&J7~)kxNg~@yxVfp0oZ<{vXnJZ4Q<`{rQAft$N0Aq0OP6FWL&7C) z2$jfGR8n=%zAG^T%}Hv-r`3`^%xle7dY9fr-sq@nNNm|TSD4AfZI zO`9lj($PzQm%t+cwUT|!-H^&+nbR!6=Q8LB ze8;WC!n*NH_vIy z`c+V?&4E}~P&$Skys01rOj%kC)6Zat|Cwm=pXkAi^wP9~@|F)z%$OtFD*9o}lYK^@@9p*bmrx zlsE9bmjhwp{7}WzXX9pvt=Bc`m&mvhY&aWa2ph&heasZiPN7kK#T@lU{~m>=;fM+7 z5&`YCsr%-`mv8?m8o{{*)NDq4Wz9x>m{9mBf|^8j_tW~f_4~2)abS6$`SqMl7ZUYr`yhqL+3%fT zq^-C*M+1)UK(KGAY)Ac5Pe&^UZAoVVk3!zy9=aMF`Xh!&2_sHz%DI<7#a zA7j#hwehz*rrX4iSYRuMdeX|_wwZAQmEj_lfS({|ss1i1?XYDcT*c<$z!U;wS^fz* z33@Jw{laXn;H5_x+p$7^gJmJ1vx;Vkw9r0;?gUOk8{~Icwxzqc&fgCjiBU#r6It`@ z;9H2j;P4M%xaB8?Ts{L&E_ekbOarJthl8U3T4NpF3Hjo)ZEGPBPcdN}S z?Oy&|#O;IJ*_%ppd=^YOi2du9<3Ah(ySO8Z3n~)!dRKi!@r{SCGFFtxrl)ZfBtDr6 zlc9+p6KlzwVBbtuDPB2}(rD!7E1w0uEHSSF$5(Ngr`x9QKO1WdeEb2IG9~Z!T;VFY%mczQcNI_4~nrEkN&9QcmBh z#sKtNW`eLb_RFua>{Z;nP4p0fdPY*0i%oxVMlWsZD=2w@GP(S`Nl5Y#GSDIZGomSS zCBVHmhZ3;?+ms@G8`AMiI`6<@sdw?=gVfz8W85W{SzW7bJW=xO~X@((R{8be{hV2 z)|ThWp(wl5lT8CK{&5!0!J%x_teyx@@QPXZ$t~Ko)lnkBbZDl+UbgnruUc;pcC6b? 
z^o;OfAs)KVgfUvlX)UpK#sf&$b#ioAgxKc*{X82u5FG1^3fzG^+wY2?M$F};3CUyf zB%R>yHnwmsYGv$md{US?0oo%K20}H?b{P$95I%m_IaUkJ=qj4lAU0h@8K-h4AmTdM zavakz!6h?TCTt)alt&#z7!8Q3bIGijbSK(TNmzAv@3Vx1|-{pxJEy+sE_ zZ(Q{3K%*_1+SltWH^Cp_(_FLFu@z*+8OE*zLtF?F#`u+{g-g6$-mG@|Z-bNL6Tt|# zNr@Y<`tECmH7v0m1mU_6xJid`ynjS_7KrrBVdDac=;q2G)1Q7@5{ zKts(f$7QARJwkYJpZb2$wzz^{qjh!T+YoFVBB%XXd!mCa?M}1jdKJ{%N4^(2aA%4JL{*!HjxJW<>#Sp60MR`etUIn{G(|xV z;f6s6y-7Rojj?#FD4Ozd9BRjua0n$zFGI7O&YQ2whB)Fy}MUk`+<7At>WPtRw zBx2IeGa8suv4(cq>8(^{>&H?!-7#Q8z2`=`-zYKG#6hML+a=oZnWWv|m`6_c-cYTR zD}#B|JnwWjah4*=H=TBq9o_a5Obo2jD0SfWWUeRIB&05?F$zkM6r79c%TK>2f^ zXMFG2MrEaZFmIggU)sd7t}$?_mMOwrw4?NJVk1~+?>(PGQP{7ZHI=;4O$OKVg1;<`XTwR03B4Y#6XB-RY@56D!m`u>mpTbxtYD8)U=*S*nTmG zgPq9v7rdNGU-lTd+gE=C8W;Y_&|VK2@f!F208nJ;^xr6pPn%-uVuLTb^oq zUe!eOa>?|8)}I?b(B=u*+qAC`k0#D+oPmS5HGH-(5FGGPL8c&2Zc#FTY-pX$V{@)r zHBf6qk|;0JIlFt92>;9}ROlo~)|WOYuV_(=55OAoM*O^GNv7$=AUAw~@@Ad*Tr_jK z%JfDy31yUo@K7I6)*|XRXMqc9-cT6i+m^#H^nYL0yP75J;w%84Sa(^sATtOlzEry0 zGn_NHWW5CI&A);#`gADqWay;zeb zm)EvW0N~;rS_An^@F>C|uNtGj?nA^+DamJCNF|1M=TFwdW?XJ}C3#MhB}e!M@Y+twr*0Oev9d>@APzl>p{^7? 
zCI;Sj)~LCf+<8^2L<={Bc+$7~&C=%^uxHMQp@DPMVg-^66>>PF!=0?P zO+MjzH|KKk`r{NX&-|ae0oKMJG2M{UUlMw4yv}EOcB2J|DY{a*61!kf?w|~ps#Yox z*9konK5C$Gk1y~9X(Cb6zIR;(5bB;s#Ez}lYh}_-8I70yw6Fz8`05-F={PIr8B^!p zb57z%h!WzhXn8s-M;C;0fobL~Q{+ZvA!()M)MMm{Z=%8cRoqnD`StiK@I6bS!8TpB zMkCX((R)6!D-A?)a$^S@VXg(lJw!8YW>f98u9=yy+XnE1;P<-snbkjSX@BRnK2R5o zk29mfC}W$C3~vd2qo#Q@AaHBFPbWTEaMJ?-q|Dx7r4(}rBoVEIs(7S)Whug$VvV~# zQ-L4Tjt{&|FH!pv0xd_~_tlu8!5t3S^WLsQy|V)1l#}tCTv|S~=cUHs_r;u7q<%IB zX2Qsm&1D@H(+zsDwQ{l&`**lNq}V<*1eLS&H$CVl2a^zUcNVUDdEzTZ7Ed7Q4)G#3 zL3d$Ls^KrC)q*$m>M1wKpKiMolWXW@DistZX&KPHqsoFG*E!1yJ2hsgYk<9Ft+xU} zYChs?)2i^^Lq^-C6s3Wo`1HQJ!TfmlCmWenmzjWgUiVCEQ|UtPNBxsEkx(DK0j5(# zjWR-Bv28QxCnGYAf>_z~k+uLfSjV-IF6&8P_9U!yHp;W8$7x*4pta+N)@JI@mK;qm zxhY{}I|`?NvZ8J};_nFCuGO``zcG%S7%MytKN^s%NlmzdgP;}ovCc)6)Yo8|h!4FO@zQN+Oe+lk90d=6)6Nvl8)&`0f zG!d00LMp30TWNW5CR%Sh>dn(33pPzsa!=N*oA;344j%>&4dn4dyK|L^#2ESaoR7p8 zNjguSg;G-&>C__)iDkNqHc3_d)|m*+bmz(#)x^qs)qq&;g6riKO3g9PoGm`T`O z5Tg2bR;mVC7UvyDE$I5Erz$1G5uiuViP7KPmlK`g5m#&cYSBd_wlz*S*-jD3S&yh* za?LTNp}eXaQ{TqA{mRp4DK4Q?eawj(KH)D|u0p96^JICj{V9SiO4t5C;qw*As@TAB zDp!Y>P{HJrrD(>TZa71hc<{it^U8fgmy-zdhuUhzHkG{wp$Nt$bf`p#ZKwPAg59wf4i`D*k`CJk%ePWM2oXJ=5+BLmUpvb zHPYsKv>Uzc#_L9df`7`hI@dK5VPA9U5IO=5%Dbo1lRP&)@r+%`1C`7 zWsp0~u~Y=_SZ8Bxdz#$|5henPrbNw<)|0N(s3Q)3cFT?CZZ(0_im2(0OA32PvK8RA zQs}E|tVZvY%m25%jw)iPN-(XoLY%fs9l4pQFxebrEA%GM9G)xW;e1s7+9#i!B}ua8 zv>hC?x6-F*s7l_K*b>zCEaiHBRaGq4r7YUqiTd+1=q9(sf4(>>R*~AN|&`3JS(rGPB0hJ<=E_FA&Ab+@kS!OBE zqnFUeN(^0lf@DG*Ed!OxIH}*gm>OFGD-#=f0vQTHXachx6^z+Z4LFHH?^FhD{$R=# zWwLFkV0F_T;?mKv<-&~_3wn!g_nRfMgWi>_=dOL3hM#@8A`Q64kA?(huVHiB2k0s` zNb+tA60MJz0N+t11Irv{2Y#=)MbEKP+P&GrG67wc3wp~BG-j1*kL!Um_S*P(M$p>q(0(|)?5^gZO z>QZ&UYcNHS-~F-wehWt{I9(R2?X8+;3@!d!HLwXb&R(IimFxH;VjnDhq5MYKtaCF* zKK|iHnAw-M#(A@pqbJ*7xFUr_R&cw_ss7!oB8|_JaDbJQzlLE z|Inu9HGW5xUY;?LKlBxv+hRXa=PP1gFi-k{)4-A)7RCGrH=r6TQOmV`MPNkiZi^Xl zyF**H=LN$EUUEdyf({fOfCL+I_g_M z%NsF-x}g!Bsh7wW(x=^lWT;l-WQGq&{5wW-WTvp{e(VkGM9MlyRJh^Qc*;l@^5eef 
zg6Y82F=V6bxy6`=!z##j$Wgj6{Gqn#5RyBA%9z-g9EFA~#)X=szt}Si{N>y5CO$XJ zaWmh!EOW?zG;h=0tYTkih9NvLA| zbD|J9FeGIaOjEwI4entBItZ%85AOZn+Y;){qtcBkwk-FmKEehp4+bN>UTo*0HnNnY zuz+-UGh~~fY?xHh%4f`3w*;7%>{l3(CYdy#x%0+dpukzETqHoRdfTrkpsEC8hk@g} z3}KH=Ki4@x`PFQ!T`Z*LFzDrb0(yO< zC1o3uA>TFbR}DO55o9*qEH~XM7Kf6J&%WDhLsH6S>0}vRt2HUA;_Pk=iGT%HAvqxe zt(+TI%50$FRM{k_!PFa>lC9LM-EVHtYnE?qD`3fv_XYQb4pSG%7ZyI>7^RnSV+Xw$ z=GZX&*@I6j*iiEueLHKilv`C@OW6EVr@o#axKZ912FIBb?S33$VT=$my3yoHv#&{Y zGr|+0)p@xsY-#N~_%T+#wy~87oD1EYvgf%p*NGrFG&{x^HZQOV9*2j}PqqbEPSQPX zzE?jxSE@othUrEXS*<4%Z=YWQ**P6DR31&Hdu&}kc(QUA$h+>nP-NFFvN0L7z`;#o z!Va961VJZ8@3aG0u1Pp-=4ZFgn->KOcI(#wOT5+<%kkmVRt_J z52rBrAA=TCTJAH+j$y-OIh*<-Aku@>OP6#yV;^CBkZ?l5^^ek~2vO{Y3_Vth1_G3$ zPUai?hQRK3AB?s(4wY08)Ug7_W%0%99o$k?0zQPu-_HgLsy&FYzL9Ah4;!*0MY88Caq48xXDegrz?jI0Uvt8#y7J*stjkKp3Hp zJXqHk1%uH9eGw{oNhTY|)*eGJv50^s0}|cyPXtlkgsHfWkK97Av2?>+u=D%i0uLCS zSj&l{aH9QNZg5f3M#^=2);^vHXO2tV+qIQ<7#h58?%ebzrsS@`0x(*UR5~pM{rolN zGu0BR5OA2))bMyixS=x$3Y;*i(Xm@*?69GyMg?qoPa#mOYGbzawS6bZxCS&8xVROs*((waSaQ7)a$u&y)_*- z-Mrg-1fhT+L|uqVEASdi`j#2m5cM={2CUMl!OHvXX^S0bphRfBdNJz>rK@{!u>*Y7 z2(qQH*l)OMSashGh0@4-?apgM)cpDEcXV@Z4#_jkg!yGo%bjQEky?)n)#Iqa^e~B; zp={l}ZSc`p*SI4s{Ma7^Lzx~eq{fp{v>&LH-J)Dbieh$i4gf+1T1*sojczF=&%FoZ zBMA$t#)`s;fZ5y4RaXoiIp1h{y$jx;(Q583r4B4)L7GVKdBTOEX3dAPxV)@GJ85-B zGqA~-CNipqOPvQCTsf*u=pqnjRA%SI0_CGPJXBQ-zLxQtxWpDwV(UKHl5YCEhaKEN zQr!Ru=0(>Q1*u!VJox+`GrCpuQJZMN21Ul=$tFNo`h|Et4gaj&ds^~vwHS<6^u@vB zQz3fv+y`>th_ho{^E+M&X$!{kewKwSX2gXp>eeehO3(trA7zt0PM)c~3m#q&u(F8;{Xa7%kQ>=c-)n!~KV)Zn@eSr(_DOL_ zKmObAD;DOSiSSd~d)6^bUf^KpvfB7!oe^cox<}xY%gY6T>M`@Bxx~gmk7MpS=CCnk z!cQtrAZVPO+v48SZ5V(Y#gjk|pcG<7<)tBi>$igCvBk<`!T9JI(@G(2f}T6WC7)pc zT=TYD9GN3MWlw7M!cl@v`+ini=BVmK0{mJ4T!jWVNvoPPU8XMaW2_gxOSM;js`QG_#(8t;e{j9x8G0`T0#Jmg@tn38|edqs7&E$-`;$>-4$<`h>x@QCj z+U3w3^g|BoPhF;5Cbz{o)kd|9tcN#h!`5E7)ePZwZA?MyVJp2}r^S@R(6P8- z+rPeQk$?+f)oW=u-o}j=nz;QLRazTjlZ`E{DA!|rHuk0(k`uVWca72+=3uQXe_^8a z^U0stVE+&RaNYu$y;c>gIV~XDR--4g0~)L{N~8B7v#U5M=md5XeHDQ88D{Oe_rV+l 
zY!=c_n0}$X4k{)>5xhc^^HG?JGhzsAe>J!wbc8)9hau1NYR0UizuJy(maER~Y+;5l zmaJPvvPJ6SuVmXK$nD$o_t0a#dBCucb2P^CUw+KI7=ZJR`v^bi0|_D7&)q|7NDvg` ztqB`@yxKl&_d}mkU$$^K0GO+&sIEEjS7D2VL_+wp@r zn&VfI;Ry*j?GI0)%~?8;DvKQLwe4C2HWUP}Dj=@avl+;_3(e1NVzkw>J$m$1)xqTB z_#{;Y-i~^ZE1DJG=S5RShE`l?M}{Hv+OZMEdgHiBs&o7^lGX`9|Bo$!2v%DhZ;yB0 zREU`Ti?k%YLns#|Z(G!VIU=GhBw7Mg*(-$B)~G+gymZJcc(Zsr5H>?16QSIrKuTav zuUOzq_^S!M>}{M{0TD!tkcf;y!jk?-bJ+lm^XF7GLm5S&PytQ8arpX8Bq3e$VF>TIFX!)0D$BCH58$b=rf-sK^=NE~)B$rq@BzAFS5O7Hc-gxmD0;UMv`%NmYI# z(ePUgnN+~ImSVnN8&$@?PU4`X#|2hH*n@F@sAaOxcSCHmO^0oQbu!H2RA|~>Idv9G ziUhIShC@91GPDV%E>j|FV*kS(5kS{00g@8+;agFYS7*m z^pE5-L7?d!Hgl-U0GS$#z%BCwM@St_@=6Ng7Rl#nc;>zu*uLkLEk81u6e@%$gKw0? zoj8~X@7RDXiFk%Fs@~#OkTde;!ZCI|nxcsaoi|B6ocA=ztq#?C*kac4SOo%A{0;sw z9Httki>)ehe&-5aZ_|D9`OUWsU;?H~&DagfpbzQ2*yQW7W(k9JfbxmJEjADsGc|331k_jq_m6ah z>K$bO$Zwj({DeWf>hO7WA*9LgPZ0v|1&9r{-WCp$vJhM_^(m^z%2|pW&-UCl=bVN& zOvacFjk!+2#0sD~0|(G&kd~wYjRS9u$K`K8s)Qyk;Z;$=ux~^@Z#X<6H^kv~G}3V( zi4=pzB3ysACDe9Drmwy@Ps$je;2uMS0qaod5FLqBK%KFcZ(`AgNg@z}n~SWXChw z3_LOIjtAN7AATC~Y1@?yp;CXEFX@uYvkf`~T~%}rKg_~0Iw%mNZm1@n04)*S6oXpa zns`h*x%YW(hF$I(oN{<4dkXs&>L5n<`0+#x@Ce>V9>Q8sq|b4fxzn@TzrHI5V59{V6bOkXa7} zc`(#vq@)arYH1hx>P>(c;ZDGYsfkA5FO899#$aQCDs0vjP8FusB8##aU{oS~%X-q@ zHhMsvl^$1~a+u~mn_%e!2;n>gjcJiEF-0wNsl6C7gM`Tqj5JG_bJ$qaaIIl8&0}2z z$}TjRi~z?@kWNDavR>~y$>_&+gAKYbK@0f1@H|EWj zh1FN8EFSu2B)b%=Mh~>D%^skJK-CNJs0J%l?>PhF)iArGH5;J&4@|>eg&&PO<8|hp z^wFPiBA1@hu5}Sl5(eQFtYtZIP1GSNz?qUP9~%=7xcN?Cw&!(-FKE!TQB_ddvJ*Y*{eqOuq7|?nU&>o_@S`zC(xH8pe6){Gqs&IOryXbz24}l zna-^s$HR@T?(>dCiv>U0&;Ub5nlxy+z}FlygTOVS^W86%Ds+KjKnqm8j8ztu_Z?Je zoiFkY(dwv9IP6SSj)N0QfWyRPDzk8wgiiY(yb-IN_6;k1g=Fn#us2e!k%K+;i@u;x z-!}16lnj7cKug8ddm0Mk|CK1p&>0rQ;1$szJ-F|1XY}Q?Wlhg;Tdd-O6K!}?g$FNZz8>X^;VRii)$&I2v{10Dfa|FLxG=xLef+XcK^&7w zf4M)hnvGMVLikMmHKU}xVN(&Bo(mLg{AC_Zk%MVbsFH;dMc$+eWI6gpa}WzPRXLEw z8}z=;$G{A9g3!oh_=;3`!h^WdF0kpg1+CRv!R3c7Xd9wF54tS<74%6hx2^J5~cGGVvm>Y&Js%)l(4c0xZ z*2oWV;kNVAa|$T)os6qV!ePmJFb2We-#K?lCr#kbhMJz5&?+-KHQn!{nI>TapYBDk 
z!^&pQ?SjN42NoSRK$&Am>&{xVAaJJ84*6VoVxeRQPn7Kfx7?2d9Mgf5M~$QJOnKK> z+|7K#_2Wp2I<8@zY0}{NQ4`rq#%}G!A8>LTIU3qdBB36gP+RP4#acpDCDP83{!rR- zMeP$0OeO9=+epT_C24~Qo0!QysRu!M*@*|F5Mqfo6q}#>?2odoNTF zVGV=5j(!rzu`!U^FhMR240cdbG`DCm!I{7-Hu>4s_(`*xm?^*Ogt4U;D z-D(!A>(Nog=JuoO7O}kCFcILhipvVXe25Kskj{y;QsGy9P=>;05-8`#t&GCvcLN_% zT6(^xqwe61`bb8&@_aRuo-$x&OPNzv-T zt#5n({&MXs%#nnHB_kb62rzZ*o^%n(00-~id$oy~IXXxf4_I!dO{o2CjH6avR*y^~oa?H%Z$Zw_3zAMpyW*p>1?3h}kPl<`mJYcE# zWlJVlv#$<(fz6Lo))r|tT4v7KHe$cpQNroN<)dz~FGkU?dCx^8T3SWM%91SRx)F_b z%DETQwTC}iY|xY>-x9!s*{8_Bx>%9DJSxt3@rIX2)e~D079jvOa*%aCPxegnvP6}0 zcja<=gV(uqu4PIP@w!lasZ}Tgmbr5P?r1(lY)4eh(4%)j_mfIcGBqNaL}pzrz%;&- zCTt9}$4mlr!h?SelhR-)+EP0}?Eq&TDWW2mKr8aEy}UEJ*kwB5xE}kX(<#F=uh&r- zbML_~!Ak_GrbT2@ZN9@jSiN}wda&ctgK$QQ`^Os}mUP(q`YXtCejIy+BJ{ysCViJ* z0pQS$eeuFUfJCXWQD-msu2s887+Q)fT+4eZyybHLti2{pQ5s5~APOXB1vI9AHu;aO z%ALaf9H%L>itz$KTN=uPV(ayhxJOZ{0v2BPE4aS6-AVOL-rswt@0f->HduyBd~=-3 zJ;PwZGGwt%(t>sq`&g?f&-b-IxlC=XWt5Sn9b#PVz%j4!&Ad_Ls6NKsnyG&;P>Zl~ zHj`wMTH&HnTWl~I{PH=-q2j;QJ=mYin!dv-zQ>rOobRWxTdQH0U*)tEabv#0I`BE+ zM>3OhMU`~28|5I>!oAdZHjxcp&>zW|@pc;Erk!Oz9BBLJnIA zq+&3qq|97*b6N(g;d>q@l8i9nK#S649Fd2`Tz<<(|&^R?TiOn;2}$ZyLZ}N zgGKX7Fp{Ex>aIZSRm+s8iy9!3S4@`qY8XZ_sw)q=)4wNI&?uWLF+1#>EKIm1D@u_- zWdyJy+>{xAlcP^CIa6z>+&uOT7p_+=4Ou?2vWUE+AhJKSz;A@ot`;u6FRJkiT6b~9 zYchgeDf1ZJW%*j0HpD_%&PdI4p0uU3-_V%uY-kz&W%_+A|tJ#__gD9 z#4ujBUc5&jhP*-4IwfY6sbKNr51Fyab$aA?3@@=vgNq%T+oOh4Sr;)w*(H!uH48#l zL>+U=gici(2OB;OF4Ia;*!ogeqDP4m4ktH6f^v^_LKD&scXZ%8N<|`zSJ_ROU66y+ZB=<34+w2R{o+`58L?o|Miwnz3azNj`{- zQtGg5tvObj2X-RM9MLW_jbE8>zrzkT9>mnn@ZX&kd2R$k^^ehx7QNbaENkcmYUSfF z<$B#jUQ@CsU)tP@=^Xkl!V%M%;UpJKU}6SAqaisK!85SZ8QT>qw2)RzybcM>XjsF8YB+YR20zm4Y;Y*I7#y(?HDuKTgpdhNBZ|& z`aJx#K=A2O@2@tn_+iM!6ZR})R>k0b%+n+jc`dP>5G61X2BqFy_908$K_RrZwVD@@ z`gIG^0GHmL6R2U>jBH!uvI!WjIMc3_D<9OUQQY8}QG`z6aXJ2WPGVL)ad@3+T{@%a zLB(<_#@Jjzg_uC5#@Ib(7qW}9_f=cD5*$eQQsO<&N4V@OHOz!y<-b1-tIHP2ZMPO; zgsaG*YQnVIb3fPFlTkXA$M z?w>2zbXxrRSUcAOutHsWc2PA{-+gFR!Gy0rEcXuKk^7?dd|H5@5f$S~@%c~sjyO_?t!byd 
zJ2ly=-^Lk)hr^M9*PKbrwl_3HvJA!!i`Oh^axC&Maq|+ZLdhaU%_w-TZkf)Tc2P}B zH69p&S2#{gm7Cl@(#NCC*1=Gu1okkLiy+{VwEX!_AyH=}tkaWxZHs?5>X&UO64d-! zlJzmE>amEPU#_Ci!Ihtccm*t*g;lCl5*a8PJ!#twE!`GOnu|YRxsFNOGG!)Q5Ok>r zRUDRBeW`O?{f6lj&Qm`yimFT(%r4Jh_dxg~0N8d^i@wOH@lnJD4Aey|B7e~ot~)HNN>gOM z&MtkK>G6^sAWXOMHRq6HX!TvQn%3xp%tZg3QWye(&p$rwy%|d-Og4Lxwof~1ASd8J z6&kItN!q?)!9hrd;FHnX4ZRiZ|247pb1cg1#Jlr-;9YWod*Qke-JVy8w>RYB+w(3{ zBiU9}0{OH$gM#?hzZJ`kjXL)EKATO>hD1HcQ;oh7Kf{&xtFUj1)SN871eOcZL5&Qdi*0MX|i z7~RGOX~Bu+;Vb@4>^@93@XAOY*Q1ubg4g*zGtW>ITGD!5y74umF(HmJqMX^*T!>};694kv(L?`E>?RyFn#*0o-8eLjDlOX zP`8>)_kq9LuV)+d@BOSSgjP!p#DasL%;}BwoKR(YlVF5eoln}DOh{c0Ok=PqiC|I*D?mN6@*2I}Qw4nOWt^gGlqp6(LN*Gcl+L&vqAYw7P;RRT??6)jmH7 zE>f97waE_@-Yb+rv7#Sh{GRJGSO`@C+aotrREC*EiG`#~PV3N%>Q^y`S^JaTqZ?qo zj1Y;=*Kz7I9yr{yB9zbf9gm9EBN{d%3;fG1+0EsU>Y1M`ITxQqzRupY`J_+VXQ@ON zj?@FjdruvDb7k)Zv6Sr2&9Nke<<9Gh*-k2zC-aL4`B=S znmBx+eYnHL?+w{%D!n#%51frB-Avdq8IliDf>GAcReH5K$Zs%X0Ow!lucfiGR#Zt5 zCL=roMT{MT0|~P+8RVslz8^R_rU*DDmfzYIs$ajDKGi1jF2dXlRkLHz_zo2Y4)oY+ z%BSS#td-^xNk7EkD~UEvt2k`%Rm24vKlfY#Df<3$eX0Bn%46!#Dpu5)$rAkWTNWcA z!qr`B*;0ktgkFYqK2AMq;#E*5HEGhDlxsMQpEZ{V4}Mxb6LcxWuG6a1AR)Nxb2FG$ z4HH}Up|erUtIqApR`zw37D7d;m5yCP@s!16kG6=9Bz7(~`&kj;er4mZx{6MrIVVLHst^Po|0CiRz+_RrnBj@!L?AoOm~*- z$=xljroh9hQO>?)H6X(z>MbHN#`_x-XgqFti%+;PH$c%B!w-`PA|Ez}qdj1zm(L@8 z!_7GsyT0Ca*`c2z9^yhwxe-P3ZIZROwUpz>;My{b@C6wMDPyPWa|o=AP@kyb7VK=T zIl9Uo7fey02;Y6R*1)L#%Om{3lb@ba+jc|&)M7I>ryjQ1>r>C9*hOqX4M`JahIU4X z_zGM*QpT(0vZE$4w>DbK0+~n?$Bc;j3O?P9vjJSzuFV#D4_vgcCE5j|S~v@hm-dm+ z63hVI7ByFrd13q$siD`#&*9_e=}O8TAi$H%!r41aDwPMLiW}j1wI*4!HD#|Xb;n&g z>dF%!jgVucItaYzRIa@Mr8mD|Mgk~>!8P^SH;sT*(of)>F!SZ&TQc2Qo}g%uKV$m{ z*MBJ=3xu}aUO#r`6+|1~t)MF(2>+yDB1?_gO)2+$P$LjPX*+pY24p8a2r?;jDH=Pq zx-95n!|H}nsrBe(RbCL2mny#?7j)|M1|K$%{U!{ek{sxAjR(R~~Yni%X z2sFYu---Zmb=$^Yy@onSL&c}UzdnZ2b}~`oQrI;~t}ZW-H}riqHKx~2O^H7rIJdm~ zsni>}t)(&?_6;Ve5w$8q8{rkFp0YcSWuEA>3_s#0Jb9RsN%EUe3Za&F3r?xc^d(A$ z2u7mBN?;MHX_$+z!!U;^(M4u|AyH7H;3f%kbsM2*VGKzcbiHSJ*Z5x*6l(Wt6FLXG 
zin4SN(zWG;if0781!X-Q=Pxb<)Tz)f$E+i&fD!a%>c$m+3#?o%&iG9PbSJh^B|U$? z>M3NpGDxWA0LY8lJsNNx1oz8+A5ZW(9U>U&hsnxvAw#Z)jox?C+a0_IY# zve&x*oX~DDSWLi;H8w`pQWjGogmO|o1yO%p+yRPI6_EKQAdHz0jcy9ln`MS`-vPAT z8Q}7o#kT~@qu(2q=L^nU#h&&;8xicH0+m2ytN>p+As1p!+oYN0yP3|3>5Bd4a%TjT z`|1soa~myYl*44la_RVN4S%V)D9=}tHz8h$#tjT7wwffP=`=mNp-wqJ3|le_Dj>q! zo;TG{v3o=j)sQH@*!PtA3Y87(JPTHmjMcH9y90}ro+#IE4&;2K{UtV89QJxh%+5WX zT?j#`K;KjiKlMhTy?8Ms2b$xJEPzLINF!FZfA;Ixqs2df38!pr;pB)S9zTGBV0~l` z9@~uQCUraMQWUPtuPSu{j9kp?Rh3c7+)+u%st7T`)-&=oie<(+K-YWqF_}Yf%zxq9#$S4oR5Lk93`hftKk?dY~dpd!9L{AGXCrXxKvav*VDtFSrfwb~9 z)N3kn7~%7EN~-0Q2JW9Nk?9E2(r6TXMS6a437Uxz;9vEc$@4`Ni~OY zFxNBaOr^L)d!DWKzmULzm3|-5x1s##QOgaKH~d>dp1cWE^twieC}1Cnt|8XUVS!(} zj8IVeaH<-$*k{cfli2cD{4f1*we#NPnI1U-`QCa+pv;%7Gru-Rv%yj*&1jkD@2ZKd zqX;#a#b}Y&=fr5IP+f)7u6AirJDDK6@`$kIrVgiex~$2;0r!fR4qd@C!+sv^9jVLC zELu-#{P{X`7WLKB^Mz$nuQy^-`M_D>%{?NNXyohS_Q5RvG5jhOA3T-;gwF&`&KA z;yB1x0@metT%{BO>xa%=4KtK

1H&aBHphuqX_E-G3oh2Ngw1l^q_|{|Dl{eZp?w zSV(g&H=;BPVu9nOOT;il%IEy0(tgCwCv5LF_aHwPIK7%s_a>|p*jbymzB{Kd-?qsU z@OqkF#HtrxpEtewv?B8pnce5&6jj{qs5C(UZ6Z&kf8aNLvvQRdB@z&ozW-U%Wa;n_ zm$&X4#hI?u@}=gI`h;^x-)Hrz7y3U=&d`YV;r4lzEiX&soyah8xrR-MGM!Tzxf1f<5|m$LhX3@I51p9-F=rq_8EbdhLuP;t6j zaNU#=@wWh;*ld!c3n$0{z_B+2m%6VfUA8BwD>2_CTw$kB=gzBQD)rrdUea9`C56tK zDt?!+i(`+kmQ1Q;Vkrb{*3TIc=-PHjYyxr9!lunNpcZ20QE(t5GOY05KU=cTMU83E z#5gJ}3`{7TO8q9~mHQR8;hp@VaC@<&Mn;e8c`bdC-%5~Oh zqsL^RLW1^tNX|Yc$7%sUl$nRqQzW0hGgt!7?yNtD^OxntPgiJb#iHePQP`#OHoFo% zShA-TFe$G)L`#>!tvN9x$!B`%Nz3uqGeY|DrehignYZ$h%Qp_7`*5dv0A zZAT4mLjtd(BGv-}0#pugZ4Ug2^Aol(nl*??AzT{>i-ctBcm)@RFq;KD9#94;qo?3AWDLPEX~joH8U^na>;Ul+%YC+M z?%N_+(RUQGmLM7S>P`-{#nxJ2u)^}sP?JPldopos2D8MH5~t2rD@76Pjidxji1VtK zjjGPIe#|=^mq4JvjsoNqg0W;I^b&Cg`7sV3wF5gIf5K?iy<-z#!vC%tM@6wtLfl?< zgyXE~6PSoo6mGjuU;l-0hQFa37QM;Er+{Vd>AX=({O=s&SET24b~>Zm;bh1E35FO{ z2AAPk;H@TV>lrnp4PZE>1?Z!`XL@{T@)OoYuy$5dq@wZmKI@~~49l3qa@#es8Uy`5 z8ImGIaj82%r%M%xq(MYUgX!)CMOXFQ6UK_|%+5Ae8r0 zHqu_OlFX#3-aA$eM24+57G;#euRH&4F#mt+<(`sVSSW%fE-8-8vtjg_cWAtHuE2qp zJ{Vs$unFNXfNy!Aqki^&j3YUm2%#F1r%UVTq8Vx)iPXjd*zJl+V%|Svkm&CCOA zEJQ||?#aUy+m)9Sbi~8tO*hIff)^o$Ema{c&&0E=DznB3S?wQ!)(MKcb}za4VaYJ! 
zK?xktb*N6&_?XoSGBF{_Da{6+lCYrjxFqa68&dQA1LCH0bI94YB-58jHLgL&*B8^` z5VuO6DH#yB`T|}&BXdHXBuZKZ`dRCE8hJ7~$v&Z%khRwDQ=%q(I3^!y6enVB6yI4l z+9Ql@fcCQa-v{BfrCcMQkIR;uuYLEJffUY*4Rb&&qoxVNNrZpt+rNNPXDg(wb>a{U z#+uz>sL@O`D(dETxR68Q$PsH~u>1J#ZsTIy>}r9yf9-9!wLosO$FNT%GS&;F0YQG!Cuvck%q-J-1Y_J;{0`N+JA_(ucn z!itbde`@$(X=_G%@R?B2Ia90=2BFlsR#Ktpo1;(?A~og0BkqqT z7`S9fqo#ULkh*Or;JW=TjqatLjKmEfysRk`%?OFc%9=}k1O!p4u;u(OTJPGp;7`r4 ztd#c__yVzdWps*r!`nzI&!Bc~C$v1j_Ve9e$2%EoO`bHWPY&ZesPumurmzIE3R8mx zlrEHw1{g1EBtYLQqVoGJ_lBiRAz@=Fl$Q!-7&$%L;^gU2c@eZNEYm`cu}Y-%Z5%FU z6>?^S3n~Tf$FgDX3>=}sdD>&Ynqkg+C9k~l?saZQ&uj`(D6Z9qAc-}&6B_bTKsOAu zr_r}BNy#-sm$8bTw>1sPv^>JGJDA#kGJOGi_gJ4W%3AnFD$brMd(t``n|<-7U`BrT zNku}-QpNF)4exv~w~H7+zTIY?tQd3biM zH!esb&lS9{V4c#kcHF>twbm(TdY(i~(y2X|X#`dN%S(PuFL$&nR<*S3_O~9i=Ig=@|nT z7tY6LmJ%P_=YFKFVm}D&ti&YK$ULvmD`S@9S2ZJ$B=NE}7MD;d-QR$f2C!1AlUIdD z_WC-KzFl40`_4BiVOSqn3)i`nlKQBbiQ<19z|8(5TS!@m|dv87FkyClbyuQV`J>ww?q z1*u*T87yUhi5aOhdUsQ0PL@ph!HV;Qs1mu$)evqXSEwIz}Uu*bFQ93)n#y~Ky9vfm3y9?;@Vt`W} zk_E3v(bPbERVCx7kT^X5zD$9JSNxRQZ#oU2e^h~#*8dHvwwG*$kEaz#*wqTL3rii-j}=l;zuwrr)BqJpYHHpfB}J+rI1rri z3*4r}&n}&BE7cAJgE}<1zOH}l;f6B*^E!}S++XXuIdo|3>v{bD?8kDk8pYqv`UR2A zWiPdWvZsNfkS^NFWL_v5%!m2XtI79`JBH3>imWQHTYOhhSC)sj<9w~Ff8N4sUralB zCh4Tr$fkQ3`F>r4NWgoKiDBQ@^TMlY3o1GF=h7&0%Mm?WYo~LgaY)uKjIGs-(>}km z-Gv&eSPv21ob?($A^Ud7TLu?+?(Hm|YfMq~*p7E)qy&%j^$a!pq^*-+Lec^!!6yC@ zQDme8GLn>QH`x{79%>;|dOvjoA`mol4GITkWs!e?3=9FC*jL#Wuw~q&fsK?n-OxSJ zL9ZF;!4h%yAtaJM7Z(rq-ThM`)|KsvERSNE>RlP*g^LSr?9q?SBzD$Oh7LO@vw1*# z+Cxx<97AP3oJyoa)5kx<2mWZuQZK`la0c^8Z;_%odUna$s31sr-kuwHC7;D<3e>H& zGLn@=Skd&_Rcd3kQkuJawRk~S8v0Qr)OlsRN<^&A{CnaGui-=WP^}kuuwZNAGMG68 z;#AXVtpp-C^W={V213FjZQMv~UR#LH!ql&NzZs1?6tz1xd{e=0zQB71)L>e9?c_>T zqHM)AA1lb)@TA?fVT;6aaqU)Y`0J06ijTF2IX(d00|wi3N`1o-e+B4;j}wWW0eKXL z>tW1*@)hDm&f;$CkARMkk2%2q9qhS4Y1g^G$pIEN^S=2g@yt+1OX~-4;{tN%=T2;5 z-G%PU_#7d!kHmWVXDX=L8(H*BXk`{8)lfL4L#&Diic;0@d`B6w5pB{)iat{BG3*#Y zrO-Hhbm!2W1+$p7@QI*B{HWk0e>7)r++;iKm5)0h@QSZUK8j=IjY;ZgV6e-Dd{R_~ 
z2w}z{#hOb_b4g8KXvIuFH93_VnxSEd(x{(W+a(G065r(97=m;v80TIVZRC3JOhCQ0 z9<&ogspLO-L;xik&USN;OP377JD30>wW-KX{S_7$zC2_mxtx|#bWKm-^h+9cDKyy)rY|weKY>2BfqN`a)Cep`}Dg^xq#Qs$QX)XfB_W2|e#=1(Z z^#u+3ZtA?v+dU`8TVTyxV7~hWqF0`aI!79vAAmm&oozFqP1C2tfFA8960+}BL=;s~ zFf=qIf`(O!*71hJDJWPJvrV#7F*{Uo3pbA!_d@w_l5h*}%GvKvGG6lR9YQz#BysMc zX#S8H!b<)I;)RH9dlFM!f8BK%mrEYvx7uEouNmF%bWlHx()%6>FqX7wEvW<7j6)L4 zo|dGpbRg%3%c&CyezLSIxmmmEMRvJilQcBeoSR7nD$&OfjhIr`MF}M17lcz?=EjM= z`N)g`22{8gF2Ej1u_CIH7YhnVp*}AbTL?_|1O0n1rfQ`-ze@(c8BvvVHo0P^d;QsH z#5trIl-_d69?#B{jQ_lhydrvP0;XRzwP}9&QHvechfjM*tbSwxH%2sb?Mc+Gy@O7n zW6y&X;!<0EtYFLa6{)2Aw82gJK3XhBDp`5>mPq^N=g7nz(}`A)>?nXsLh6pkmkG>u z-A!W-jF|=tg>xTpnE$W+xmC^rt6hdC0!@im_(zndS&zKxG}|}&$d~v;Fb6p+V%Q*Z z?RpZYCOSXGmE-iN)OMW0OS;DUIAMg75z+4!w4FOxfCb6Qc&i&1jX!Z5C4HVrM&&YK z?qRk3{vYe*Tc)YR^_=CzW4s>cUqZo3{+yK*S7Mi|S*1u)kK@4{U{d9W1@%^><-;}r z)8ObC=htzyA~Qv`pgP96q@uC!^0b>S)ls_=3?{6?p~8iXptpTe<)PGgiCp-qtF;1P z|E`9b9n|&Wg-A5n?GR5)H}U!zOuF#ww10PI&(-88;{I}R*N+Y^J(Gf_W|*UU3kBR+ zfAgAf4tG3PUNLuj*Vn2o*mYUsD7vSIpT3w#F=qg=vEU!bJ{&5GDBDl(M`_3Ut#6j1 zU-iOe7=pe7#Fsa*pX@d3)y|MF&aSvLJO>MgShRTJMsonV6t1%=IyGAJ@|w-&J_zUl z(u0%%!i!!B!A7{9Ofku1=PILZ6B<*x9z(zoCb|D6U~q=;PbIT0A)_lv(HlSr7swV1 zAwsj0td)91-KLU%_0d3RT+6|`TTrez)jiAA#z3;IGSJ+O#vJz(73n9EO|Tu%nZEuZ zN%KFp`@g6Re?;6#vrJ1{Am=OW!VCNd<->*Cu@an>we9ef4bE|A5kKJ%fZ9_`=HRN) z{`BJKxdM%)6Ht~cfNAy)Oe5XXeWeE2<8|K&9fa8i>zF*j!)kWG_a@kt<-PSh&Pf<2 zhX?Gst%BB=V&IVoVxqKXocGoE_qzGHhCkENiVgkXBo#pK>Lxjh4`<1=_`6SMi$?1~ zV&Id00SqefAx`dfORAs89{^mJ3KTZySbopUad5h(JaJV`)ON4mHKKP6LGtxys*)>1}at*X`C8V8Z5YsH9r zu|0yf+l;JqHo;cmJ*(ZIrVS3(#VKc=Ehha9+7Jwco{pO>tx7mWt>!$ahfrl|jh3Bv zjrS*%+dj_1^H)RYQ-i@Th1Nso_{ceA`fqlcW&dEu0@mO#QhAnDa>{H2-%Stfsfn%q z4U$gXUmQwDt64p#ggGmHl&6&vQxQFXVRoVnphae@3bpc%#i=Fdk1T_=I2cWA$eitf z?Vu9!eHl~&;LucpkKijf#V0r6A_huyY&tk~WkvRWw@WC@_p^u6Ot1s=^dEWoVGEd% zo_te6zx3Sz+aWIaJNwO30}||I4~!$UeOJBeqn9 zW?O;-5Xb89pqF7RWe#=Axh5{@V?_(SEpzB?*l0~f_|P>dKhS8EM}yV?gUy;?4$U&# z(Kty^q-Dw+7yFCjBus1AX~^Sl@`<*0k772H8dpJyOx74Lx~5#X^RW4#?zI(>nTVjB 
z7O03@3WRmxjbephMvK9FQRj82bVwkR zp@%Hi8tr`7+OF5&flns9+?C-e?i_G*{ILbS{kMUF@Pjl_{~Ah<&dt?tHfAxw*dIQ> z^XXMly#}*qt}1?Qvmc@`$nXVYnf`70d&mTHj{&`*31QH7(*u2%YuWGl%^0;C)d$Da z->p+slxS8;lr<9pvb?M4Xk#-xpIoGgACVy<00c|`<^`}x@9#6p413i z+j!qDX{iCcC5PoCpp??p4Zi82gaCt;p~aiAjNU84xNw$=%wNH`ajl@TrnHbZ{Hb?G z{XMhEO~kgUM{_B8DCG5QZAtl57)@;9XZSA+3h*wSe`eZHgKSx}dxfTQPlX%(bBoEX zvA9FL6z6vWpOqTsD5d5>MjS&!Ccu3C`bI=~MGN(1`?i9#J8v5U67hD5Ki=}#bJ@bo zD~>vCHYgwU$D-+7pj@iD7U{i9xqcLmR{=Yt($Vatgk0#-ry8O~k}D2; zrTzF}W|eBmSr&0!I8i-jqXsfznxWP{%!xjC*-kP!I-MA5|NLPs#lJN_U}Kn>!Z=L8s28woIU8++ z${45uOGmM7Sdf7IPvkW`<_hsI>!us>gXL_Uxl|PC$%7jHn+LlTYEw6akt2E4raAG& z*HK^9ZI4ypCd2wG8!aMx4eXm> zb|Fsie6HcEgOREcD9jdYTPUQd8LV*a{MPtxl6qjt+;k&JGZUEu%=)A3m={E`vMJC| z?quNHktL2#*-!UJI8Oc5XXx*$f(D&(LjR@`m;Ezt_BQ;Q4y4$e!S$s6T3&5e!^sAC z9FC6FBU>8;DtrtG|LR8lm&?@zc3WE{?|zvR21u028g~a+`(mDJW8LH?(rQJ?0`wZ)o|y_fMev2hn5{Tqi|HU z_>~(oh&SLu=7RC7$LD@oyyLFO#6?E(KLJHKHPEoePdkZZuiDE_m;Je5I@d2U#J`tg z;zI6lDhju&h_ulFn73k}q3&}brOpL_J(>vC5W~Dxy0M2_kHpG_1u9a@41uuPcOpdJ z_lys}T%c@r%a0{sWl;WcmE|rEW(#v8-GO3(R(@PCLSy>#Wq>B}eRSCrKRQkBZkL~y zxI={XhT{07rd0NHlnGKb6kJDcL!CK%vxFa+@ihlbNJwg_AE)9mYi!KbV?hTx=9)YY z#2}_+V@q#@99Q)P-hQQ6MSxx&m6KZK7p!0_K^2;V`4DgkI?|@ePPiXjN6=EbMFchotCVy-Y4M#dC|71^U1%imMGYD(%fiz+i~M zTI45T3=~V+I;uiVJgIs;w{NC-O8pjFeXS3t&!r;W)Rq=J&*GK&T_aKqo@P}NKsq<1 z`|UBFvYn#ivl3r)+HL1B1&rPx7vYo>@b!GE^Z;omzP zW0Qevk~>$HRbaSUp=T^AD7+RiNdL%^&Cru2`}-J6^ngIe6shp#g6BwLMZCvf%nB?Fk7d5$lQnu8Iwy?|>^oMsc6w7=oGW?nhi~dZ zSLIP(Jm{3&1KKog)GK@R@jL(cF>s>xM+tUQ>q>h?Qc?XRe2Fa*6ndD~@=*G?gY}A$s*;Nt6i(BtBwZ~z2`A#`DUYvc8cYm*?V|}n7 z%VNnRh<|C=4(N|e2mm=L!?ol1Cqx@H*=;J;UYQ#VR~Tv*#_ZvUsir8SxZCe`#1R~r zJ2dL8=C_;LKOZ0=RxL>AoQ!#jfRIQYh=t5zqm(XgZHmkG2Z(O|sEdkLM|_c2pfNjB z!=j;=A|Y{7R4RrQLN_H*4LkpM%Z}Q5cyV;|>9$g4a{jl*k3kg8{5k<&Qpv|5 zc~1*H*-Vnr@d+=u73(5Wcxx|cknz?wQ0DP-#m=$r=r*Ttv~cJie3SX{#w>}^Z@dBY ze}{b;|8BcqlDfidANeKKk@07oeQL43;dl;t6wz5r)D)Tu>#|MgzyEyw`ZoIU7o!Ji zmzlzxtSlcz6(VIzrfG79Zg)Vq?tII|xLp4E&tN_dPr*h(1up%ynae=iT_}{lcq+2v 
zU)Qoh&OS&~F6gF(7y`A}+RFh|{VWR&?+E7*33wli^j3=TONFuWNkC=`-04 zf8%I*!Gvg{{=8tbPy<&8v7nCk-$yW`qPU z*VL_9+eE;tTx?64NX6EB0l9@{)K^&VRY#^fZEx?~K51LRZD59epvI0q_~S4bGjG%e z=7taSd7Zx*r}oRH#%owwMAOp8kji+ShV#vJuJ4Vsre7dq0KQWfm6BN0<8tv(DBxZ9 zEu4v1`@%SEfTk9QdPP{>u2@~G&9$ukQ}7~6@tez7V~rYy3c?(XRO@u3B(hGe4Qv<| zA`2i#$lR}X$(Fb&t>8(4gJ$e6GNn8P=$Ms?ZLB}{B}D{w0G}_D4gU$`nswhvvOLP_ z|2fx=)c5f(d_|H`cPvu@o;Tp|uhoO>mB~_>Ksk1?Gj!zeb^Iu%;P)(IZ0h&Wm6fLW zv65qx;f2Bf8fP_JJ75In9mcuOW0fVVBHSR#4RZnuH__AcYzK_dpk`cn04A%i_7iB4 zBJpqFOxS-$wCv-GX0>Nw?XV;+;oT<;|0FR9q08F@gdb6ZilGRN(%$w3UUd**=$cSC zU0=7NEP%q@Z;FyU>|?{Ua!OK%`gPuSM5O&+LV_6=Z{alrB(i^8pjb*nm~keS^3$Ks zb>ihz5=BZ-*bzVPY!TUgvl-dm5SfeRYjE*XJV>$Nhv2__%E~Z4zL~`WTA!jCG8T!B zcf7?56*=jzPfJ^tqVJCgvcj1}?0g%J8c)WYNGS!7S*eVCs~cVuNY7P>+|<(BNQI9m+vSgL@lp}4cUEN4sVte4M1Qh;n>uktpsQZ zi*gIWU9Dh)x~6lxogj9OZETwpCaY`(Nz}X7wpZ>h9sO*lG&YIJJR`rlzVCDCNkmSi<4hQ t%PGtJDnb}jfS_V+rVEQyVhRAn&70aKtnsAwNug3cso@@I)Z@4P002#x!g~M! literal 0 HcmV?d00001 diff --git a/src/test/resources/test-macos-l0-lh7.lha b/src/test/resources/test-macos-l0-lh7.lha new file mode 100644 index 0000000000000000000000000000000000000000..cc14cb70d1da4bd3b64220f6c0b15b973375f65b GIT binary patch literal 37453 zcmV(vKt~9Ii<*5VVm(t1{`l(s=Rg1c zbDitFMEa_;5veU&V-7Z9fUzc&8X6^HS*tKMAb$P#yICV`Ki&TBzWn{Xvif$@+sn_d zuU~&I*=Oa)+n#(pxpnE=pDg+3r@xmUJieKB<(E%SE`GUZ_j!Ngrd_{1Ir(|z>C-OV zvi9=&b?v7=etued=kJ#;eZKE6{50*$pO+l`y!ibx{B-T*>(8&<z8jYeKYsh!m{JbO@G^mm$qJB+bobZvkKgI<(~p)~{=T02|F74N zzFPnHzvb7bD}HDF`Q@*R|4n>hzcZhwKH2ft|NZs!9x?47cbE2i+xfFTb@=-6>F1WX zVf{Vz!Th!H-z|S9EqoK(>88Gi$JZ~C)ewjPCoux z=?6P}CcY*4>*&5+eDdq%rhZ!f=DMyrRO8E*p53_gx0_o=$KF4GKAiRbRpKM_y<1{> z{43(dR@PH`2DqWG@OIBDVQc5(W$XIG>YwY^*G&GLUuWKVdvocYxDo5d*e|Xq@YnM` zy#4jnm$q8`uDTT(L~pd5*4IRNdu#6Ad2@eT;U0{$C)u^VZvz*v+J zC33L(jcmcTK)#tqWLpo0oOoMV%L(=MKWO12ToBhn+WkJdz2@8vwvdK%_SeaE(0+H< zzPLTFl4^->Qr0{86lsds*KOpVz~HuHuk&)w+HB!E!?tMs-e1ozuODU2j|S1S zY!|m*JAQiUN8694yq~t8ewyV2bAN0h;M)rsAo+gYt*<>9jSUP$SzY&ee=R$GHPv>! 
z{PQ0rZYgV?yJ9nOb8gJzhOKiGT$>#6UlRRvZ)dikmkc(_|5eHY(HPmLzCM4e{rYyy zW}J5PG$qBe6lhb0wr~A8`;H)6V_%NEe00}bmfGO!wSEH2A86Nn_&vB{^w(Rb8y^q# z;T}8}mY)5s!hiUnru_viLhIk#bS-^Dp2VkRPtAWmnR`c*YxMtKF~V!-7{`b1U32{$ zM#kwINKc~m%cj3a8@sp1p9Hs(En+r;r?oids@o+#x31AG$6qY7)6F3t$6oJerTZdR z_;KgUUHdkGZQD}fn%oF$`5%s+w&zj5do6lg`f7-FRzQmoM)VBO{~A8ttB zfr#ZXx1w>zeVNb#x9FYbpV5yCeM?)2SKr(2nX{?%E_;xEvJP>t5Vq}CvA&x1!3$~f zv#!|2d*}$8w$wPjYkwbpJo{^o(1t{EbFN6`{Wa8uLvRVepfAv zvAF$z)v{rC`uTX=fexQF(e@C+8Huk4XRd2wymorsfd5W3@lt_Lxt)cI$ zeoSsYc)2|i+kxwDQ(ymWI>$m|w&(Iaa%zxS(kLXkUg1L_HxQx%oW`oi;y9x6s>QUcU}ByMLRG2sQSLa06~* zhAYw?n(LcgQtO&F(x}Q{i7R!Kq{ zab~vaHLU)5Z`3>=zNCs=TJk^mH;`+@udcf#e#Kc2ndD3FWx+zY@7<3E@KChS`Mwd` zU_oDB*ltlzNgb`3PJvMD^^toBf;Z(P1V$2$FYV}O_KNTN?o}dMGb#M(cNl#o_I`IQ z2>{ANK8LY4+bpWxADrD3_Oh!B5BWzfjI4Q23fvPqHJ|2%|4w$vAi|b5%Wt1aoVvOR zcMEXd+8KSkl&u{NjyaE#UWDp;odhl4HxO*2>!kF!j{jYB*Bbm+j}7BKfGvXIUE3}0 z_)rqk;8Q-**t_VHAShLidp8!uWr0@NRA6)P|d#T@7)z z+zhfv^q;kt2W}u6@q;Sm@I@ruc5|+`t@!*F0VD)hOZG;R5@xo0jl4%#F~E(v>vYvP zLeuZTHyg5UJui4!wzXeoO582!tA2%uL*#oQh~21gU68)k-(a>`#=HF;LpwcAA(YAZ z3E1YpV`P6VyABh`RCI0`QtOB~T^;p}Jzf7eOU?eUb~;rYZcUpit%Kt?ACF>?i~CX1 zW%}f9rp{Q~HLSb;N?S8u+CBZhY-f4)Rdv;HStfC9y9em(AvH^TglH|g8NU&4c+VfOM$Pnr3Yw}c)JA!;pA;dDt>FAawOa&BpttMNv7 zvf@VXDuF8R6Y!zUBy18V`6gS= zqEl>g6Wb^=Wo23d@e80~@hzR!QLVWNz7amMp}+o#q2cGP{i*laNYPn<+~X1Tum zXHBcduTQU+0?m+)kwv`OAUhck3%0(@?XobIB%0hCS-X?)XJX6D-8 z#~enNQy(@}IQoO6?>k%@6tW5$_bI^l47u-otDe}~PGhaUQr>FXdEsrOLmMYUfTlyn z{aJ=B0RZyBk-8xY(yl8?QKeVTli|(K|K+cQI!#tZ6JOWQnOhdvT6_R#rZ@j?qgfV^ zt?jj*J!dq?AeZ=A`;2VW`9q$QA2%7W z$k(43^ii7euvgO%Z5>Zs6Ni{Y@H7r*ITP3L@y67t_2aV3JruTmZOp82^KKcLaHWS_ z#}pRD&T+02tMj?TtVdQwv@mwQ=%l|7e0hJHw1uj7_D9P~awy!xLXUi*XT*@3tdVYI zj=EFTM*D3u!rc#uYq+xEY55l_zVS>(zPNUOY;$EsxaSN<8I%MlgQFrk$wzB+@-V_9P z8RpAfRiw#R@8QM(dpsHzjWY$T8py_m5p9DWT9Y|&j=BDD%l6;27H}I(JW)D{ zFX~K%+36(ydu#&>cFS_3u*YWz8<6YdBkkps1oAmw7p88Dc+#!b6@ENJ{Rl*&-Go=c zIt*nl^2&%3sEn{ zYpkVo3}?p~7(toss+Z0oyryuc2#>lWAzH|MfVrc$9|sccp3d6+lT<%OlRxDUJBzT8 
zymHJnw~W8ZqdVb5way90yEGO0@d8BL^#uD{?HvP-374UP5!sCgBKYU~^gCO%Wk(lY z;aw(1Tq&MDY$zU(LAM^Gz1B@iRG;85rJ(Q!nhMa9XZla$)X7G-^H!&JXQ@ z1^~FX`Of|Z$x6=RB}KK1@5=`Qm@~VzFA9_Lhx-e6T?&DOyR==cw?d8GF!e}T3g5|f zeHIADq&L<*nBd#cA^&`JD%_m>!}54Vt-+sJd;~V)jyGXV*;y zRNN0tQApO>Qjcf|bGX5RLy48+{d!jtucp^q;dUXHoqlp+?)nA_*F;A7SV@*=TroOJ zl$a@5q1A8EAtpIVQFf2B6~AE6_gtE-AdSwKgT;()H2S^_Yw{u3r3SUfoz(-e$b0#IDfOHs)EB?Aq**!;1d92v_a1vQpv1nnDErL!0-@9AfG0kDOSUB zfx(8o8%36@rIfVsJdjQecGYML#I+6krW&JBM^2I7#{XNIFzux)GkzQ^s9q2G z{N^lf7chTb>3A)hC34Lo-Ua2jvvm|x(99``E1!4gh_LCrFv)?M3HwAj zacP&w**yFItPDJ&%6oALF?P(ya!))zvI~k10(+LXz!epjd(Q)a+;dZ=W@L2m(|p6| zc7g|QWFIsd>YZ*4_``8H$R2t#5}q%P`gPA=%1qze%42g>@c-EA zxMGxVJ70K+HrH15gOwt@Id2ba9>1k~M``!~W&meu3ua_I^eTN>L}w$g*d zsC1T$6$HNTm8rB7N!O*5*_h(|+KDhec;B2|#m5|shDhrY8VkDcul{V6SadtM9j(K9s={|j(b06tK+PUIj0N~lg+50eMK?a< z^6vzq=qm;&zuD`rFMeEpnX;=I{FZq$lwP{%170q*^}Tm!-TCW{wvG~c3*A2!h?DHd z=dwm!Idl4FuHC;YVwWC$Qv)iMohW{3;z=#guLv=X4URfthKktSKj$2N4CS1n`iVr> zNAftV2(Gj_ifN$<>z6%WSasJC!Vj2vUJ@=Qlpj0afZh4pP(X!0DRc@LFGhUCQZ1C2 zw-_Qc|AWR6A{(Jh6?Xi2 zQ01N-|CvVdwm>$yi^CGlI>1?mIz`ox>DMVAPB(B%R9rP%*8A~(b7z; zuQT@mI#f#zx-Bk8?62hKWAx2B&BoI`&Zh2>9m35>p7 z7vYo<7d0V1Wy{QOSM`ML+96Bl4r5PhFBX}e?vEu}Sld&|FIp3nMd^m(&5Fh)N?}H# zc{s?uJSqx{?7k4?kb*9`}1@tcd9*Gk#9p3rfcWhs3qbFj@F z(MAk%T64#wZU&U|JVuDh13b)}5Z-xp^i?aETT($ChyJpg?8WXsbBjI!az76 z!utM&CL!Tg&mUXQ9XRg&5Jvr?JCR8x9Mo^ggcOVkD?rh-87n5G3e|E^RQ)Me>`VwL zgsGH_lql`C7;>JGH&K2%8E&@d0^lU2;7rH+2?J5JEtiiOn)k}lK%42t;bf~2q%ULe)Mm4Z* zZq&Yj1>e%^i1h|^6vuC;+C(>w&|aRu5Axjf4i`(spz)gVe+)*zKWw^hc8Tn<+%_)n0nNA{NY9Oh5egqr<-?S zu$ql6%v)=11goPPl+v3~Rlj5qFch~#Wbf4HKke>Au24f&V(GMe;X!rpbv3BZB*|F|=-I};%;aaq=mmpQ5 zd_K>jIlSS-Xz)FUK+whCJdR>7=585X>Go6z1d-~ zkQZh#Z^P#odX#x`;Zs50%@Z3Sg61Y0cH9YjVETUuQ&%B$3}l!$5EgWsQcAI)t70{g z=7?zx#N2}2lE%s;CvNNz-VfvB5^f8(ZLwSw?M~OA+JE0j3B1sB zg~n{h5u>W>=V$dI@ZzQt)9LCuT_Jhf?Ug)?d z|6nJu&*mh&kZ@9m5rBG22oJvA+W@oFI-qrrHS z)wc={#B61RNOe48#IQM(80`?+droc|C$Qz+jKzb#sx-u2$%0OGi86?=JZkyd^9dq~GJ@7tip2i{!Ik>Kgp2Lj+m=ho+A*SX80KV~Gp55^}SRtr8 
zbN387g6y!HtTZ3D6-G0o%%IAe;uTd(kki-@<03=pi%iv~1Epk!UHBcgMvY;%AJU?a z-vi`knc{R3B6tty%KrySi@Omdgn%BMf@)`Up{8Z(yD%)6^BL2Y8T4){N5DHfJdiWH z{5~ywDS4nQo)IMbbj<4Np8^a}9y4 zwM%_@mYBnj(4^SSBQc!pg_LN_0X9bTj6k&l*I$y1t_LV`ZEVUN3aAx3!(A8&SW%fx zLwa5XPNNVbQ3IsZ_Qc8xx4?E{`8+>T8|`uK<}f3iSCi;0kt`0FINVRd-ppA5QOI5g=M-^w?a`(&l$ zql&vskz0POrmUdYIIRi8ZM(*r1#G(^ZSFdJwi`UB-Yomnu@X7gw@U2apt|NF+W*U( z0d~AQR!|p?U@GBw#&C}Az28wNOl_Zsuc)nZOd4dpKiTW`vF0G3R9YL2+Vb!`d2A&T zb$~FoxUO^}APBMB1s>Iu!{UdDE?O%g&K{R#%R_6O_d@aFipnoJA4bfq;1sI>RODxg z)^B{;Zhrl1nUd8y9=X|=mDGZ+E>IRj$4@B_f*}49XyBw@ov_4?PT3;ip`<_y?*X?~ z#@CA?po9GrU_=yoD%S|OkUQSCCKwmqUOFJMqEP)#)!-6Mojw#_&_Q)Q25q(u1zcfu z_!?5oaz9jOdJtCQJ>^Qh6je_IkLcl1%Ub?3q9a_*8`$csEZ%Ve**hnwENP2?(k0 zFzqnowY(fijuuvbe!;Kfb~{B3+%H}o2I&*Qp4cS8f;AU;ObHeONpUy0dKsD7M@{49 zFRrYEsN^o)n$wmSksd7NJm^VsXO3N-pCY9*aB^DO{GS`0#dvK+Bhkb`@2^2O#%GLO zyA(n(r=Y8hSwJBWJtElL^~^`^yf=5#{FL zXT0M$WZw~p97VNfY(D&Ss1xf3k^W*6NNw~benVW}ZI>P{Gl`lz1jC6^kH@Xy_KH+t z-LEd>`MWv5356b&BO(x-w;O}2zOH#nCTunacpsT@WAa0=ypT9f)L z$P8}><=?QI!`nn-289%}t9W5*m2~(EtuqU(or(veY+GnP=+cG@#Hk06NmC}PJKem*MLmCREYqg z_W04&2R;u3a;M2aPsAxrW>bd+z!+HGOh=hBpJYWkezF?TmqXt?_7H;$R_KJ627I(! 
zT*mZ2P;%d;`QXu>XB20Dp5#!+FM_i(A1ev-mh^Y!&6U+@aKWa6*03;;^NkVy&GCTS z{lg1iIPC!S@Jj?_e_0v*h=O|ikUS~l!4t6*xqTe5LKS-2FnhewV&aJr9>@+0pz3W52GD)%%Bmvi?5*`(&+q}nCD^BkhA6*0w%*N>SdC_$YDGPH%GN@u&CJ<^%#4>{( z*?c?P0p5h2y2zQg#NgtAjCEXkN(l>Sz-DTzctvc9C^07kPT+b82DG}-~FZ1*DKOd?Cz zupZ9y0~^A&Cn2sKry52r#hoa=Qj%GMth*+=V}?g4k7r1^s>EK{x$ z>ghPR29xJ7-Wz;8$c09Rzru7F;6nA~HaPrgOM^qr^583XY>o_n0LO6~N)pd~Kg=2; z7)?!igdn-$aBaE3_+8>a@90Gd+h2GvL1OZ~eEvEueZI#BiS_+MF_p&Or?8K&pl$e| z`(RU_l7}gRYKym}cq>F+&d~%e*-1l5yrl;i3Qev4GU{{5P0;JPlsqu}gpK|olSv_v z=~749-b$9H$&2QuX=|LdZv|&tj&!U?ZVpu6nvV`J63y4!lH-{W6hajU8Xd|A@AT+) zQ1c%`K12^MafQS4?=jn^^YFqBU)2Q^sAeSjtk1Zv33;-OVi^YmHs?2Bhb(v+($RO_ zZR9AC^XLddh6w&2y*2aUgo}P9+i9nfSPdqGGyyzX$kJb}q-GBbf^4AEXMsKu%Be3R ztLTFa3JL&xT<40JYZ^8XnB>O`58&MBPp@!y`WL6y7s)z+E2|L@zIX415P$4}oSRV6&-xO;5n7N&7I!d&?^YYGC)2d z@rQr!?e;Tgv)_MEJSfiv})sS5G0J9 z$fldmCaXhZj)g)R5Jd@t85$59E;S#T74p0g!>V7Z5l`{i5Z!3sIom@x-zFZ;y|O1h z9?P@1l@|{^?`Sf=j2uGZ$E9Oh(m#8Mjz@S#kV$&djXTCTwBwCRJAULWU!qtQ8H(|&N3m-nS@e8{QU_B(XtL0K}$0O zj5kFrvXSjeOc*7ST@E-{$_^^s$)|I*r|QYh>+W+)=s^v}G7dtDwU5m-BID|wKmdNk zRTr0;SAK%p6h}1ZFwSnlJaAlw5kK&&kZQnQ3XU@bd3>-P92LvPCUiw08YInC+KAt? 
z2@r#I8q1exmDgM?N98(5ABO+W2d5Bg)De~*NuUJpXA6+r&_L;i3{Utc-P_{NRl{V_ zBxMev5@V6TNX`a*J9vHY=!54{wdzxL-<>Xm$Xx^Ox%KJSrPkveiEs?FEY`+4Qh!Cc z`B5U3>I3<$dTcb`L${n2|KrOrGKIO@U-%?TDvP(_N z#a%lW(uS0Kj%S7%Q>^hy>K_gtDBgW~(2ji*pOGC)p7OAi_s(6CPq&f!yda1Ly5EmO zHgN0&Sv$j!V_n%fPgi2zhc1^m%Y6KyI)t9UXY^+ml2_6TCd+ET9NX>qY}KBs?9_{3 zYBo^V6NpUY^+&;#0a-j7Y%58L9Z-8OAH`@EG)2enCIqrgaT2()1!F1XGR9$FmqUCa z2+&|9m1Vit5m#%8LL8K4)bqL1ARn&2VAzJ@Ew3wA1m_{7%y@;SEM{c`j>B6Cgn3$Oqo255DJ)QDCO9l3FLKK; zMB4G%J!S(T2yy6lYDn&_D%K%|CB_UqDwK_#A>^GxEj^H+F`z$6nru5<;e)U`#FHDw z;*iK{bmwMfP1Eoq!F;kb2>d3n4Os*Ba6w4!%~Y7q(I*#S`Y4Wl1A!DHBkhB(islqD zHub4CxJ3G$lBpXIJhqg*N-~}}OiZqsOva|)tfWuWOv`2l=f&a&g6a;D{s)0p1)lDU z=t`+_^z6a5hD6+;P9pp@Mwb*$v^X8VMzq*)`hD2+C-aV~UVJy9a<+}rBs_#$OR`kH z`o4iC9*J=E30kAkLv^swUcg{UtAmg3K{A&vz*ip+(xxMq zY}z^{uP}aE@+T9ZG=)4MM-4P`xHWa2&K_R6fpK=upS~#4P|2(`>Js2(IitD2MaJR;#g_C6 z#JwUF!a|ZNGw=WX6h2wQHzGQ$WD=xR7G(kj4Q8gS&pQOmk;+yNSkNA@@>SgvRdR-X z=JW^O{Hd;g7%eV@LKVV1t`R0U-3`z~?)r37Pcy3$$CDUA< zEoK@9L#*b8!-#hqG1%5|9x?QD>}XfQi55fo3J=#R5$v1qwVPOaj-5mP|{)fr|7Z^2{bE)1s)D*C7S= z%Cyjnb{z}vG_q{BfbU13vgUFIcKPdNnuC}52s;SWy5v&LoUCBfdx3NxnTxNDVz#FH zOjmuzpeb4#!{~6_WR+6QeSPOr>X&9b_otsHk;uGJj6QHYV4HEsX4q0125rWI9fF`eSGTQcZS$^fl`?RSLDcnewqG~svxViE-~ zCh|722={#8M#VZApb{^gH6l!xH+-YCOe&_+o+-K;JIn+pFmXRH7Kx5~(@H_U6!F~e z93(SwahgEn%*I@*ac+#R0X-|o?MLo%htVF<0PDr#3StgnKC&q+w?|-JtUZ%Zg%(aR z5}YPUA%tC0o@Za))E`EYc^goTdho+q;VppM2MH74-O4)~1fdPnar;C1MTcf_3K;M9 z;2hu|t3MdZD_MA0EEatoMLGcyKdl!7r5H9kma+&Ff^p#16_5{y{of9-+W|6>B{Tq& z)0P1FE+A8LKJy^!a&9MoE~pvEIe^-V;Y~UG_+YTT{s0qUEX;jp0xOY=qpXg6wwsPr za_!(mfC`Kt?1>Omizgu+{r0Dw4>1r#ZvG`LD@${Z{S}m?E@0(NtX(RD6Q-Zr6|hU( zA-Wi%3u2Bw3v-Pn{28f179{g0y&lyQkQ3OmF+YC+jvyW{N2O32mf!of@_91=UN8T& zizf1H2?JZkGTs7fXjSh9QkCqFH0y_jxl#4E9hJtus(rk!Sv6bG!u3#iZYS)(_(mqZ z3id)8b|ZfiB`E_O+#|z*rak)27MFlw#6w@EL&UF3h+S~?LNb^MZ{ejsF=tTW#dfGL zpx0I*MNsCS?EzdT^K>(+K^~MxeF_k{aNojZ<`!pZs)!Avqv{x$odG7K)L|C)2{R$p z=f4;dkPA2-N0}BlZW3Z|pN*|E*dD~mGGa8&fVugpiOAH0I~5?xew^h1!G=Z@joVJV 
z*g@9q$w1)!L%`Kb#f}z@Bsv;u2cbui_zYx+|MgCEv|N(w!i~BJAp|Q=X4Y?_{@Ls! zC4OCSagP4pRnU}@;G?gNQmrD4kbX#?WVnb!5jMP+wkh04K~0yokyMLFmD`ZRl9A7H z=K1$0iF!pr9)6A&qIFh*BAIIlvNpOBUTh^n*w$>7C5HG zRh1Yhw^Zf}{QktJO@?sNGe59PNGZA@H{wUo{0lol)8H{GV};BSPkL3j{;Chteluco zjy44@*+4e_MgEZG8%UK|w8&%%qvrtxg$5HO+L+vqKkcAZPK#Gc*oUbR?#&vS@o_{x z05ArcK)cEU65Rt?rX2R57m-#>QADi~NxrV=Rn3!tZKk^MPxNsgs}pijP z_QC3r^i0J0*45nh^C$rzmy)kjRaigOpjk$!U5jFX0duIdCB{2JC+5^<;$4)m&1Uq4 z!Nn#Xg+sgw6nV?u5uluA|X`_3ZZj{zT1E@602Vz2g;9 zS|!aKvjy=p+Hnw5W3i?=W*T}?^XAyrj~DW-7`pYwPniX8X}M?cTR={-}b`yWw0(cRci#_V~8}zO$zxO0M z-VTv3VKoaN?sM!1y#Cp+w2*Iwmq=|M&JmCI$3F0V74^DCd?S*)w|>G?#v5v#)RCWZ zp1ALAQH-y)$y_-A?k08bpXlYj0G-sQM3x}*@P7eJ>ae5ye!Pz)Jq}sveZoY@8_9q` z*Cn5T)}J<3&wdKT}37A+^T` zyCp$F<6=Tn4I6SOz3HX1FCR+i2W27ham{dtY=lW9aK`jS`m0h>bxhf=^+WUQ5F~hL z;H|>e_$r4CF&Vsz1n>n;2^oo+1bD?h?4dG7rD9p&y(DQ#I-)a+88;k)k=h zZU&a1rtOX*DbgM2gcwGb(+Q)^&Z+{nd(}oGRX$-4Ta5WtK)H@=x8xLH1P70kBxwj87|3JEvtCIJR&!7 zRJG5sDLTCx$V)XNm5VQ{1afRz|I6UOQX-7x_mg7*fhN_0%{c3c`A=OR%}Z=F%_b6$ zobg!0OeL0k)uB@r7&%#Z5IT>uL3t9dZIstEk_<%cIE7+x*|3z8cK+nT^F}H@PD_+F z$!2B0=im{7^=`6zA;uD!z+tkbG2KH;&!AnW9$ghkW$B6@CaFNkAKco2`-zp2>wF%x zIjOE1fmfa*h_m?J(dl@upzT)q^~B6Dgc7BmM-IgIot}M5ki$jc(0!U~^Y)@Ogf_5) zfCJV~P_1H4bkRF}WeZ_~s=mLc1BR=y@KZOr^r4vtp`m_x%PKTv7vh6CEJ$T+4XCHV z*70P3`b;re9QUpso)81(IUXzh1TmDFR++^RZ*t&_j6n{O9pBH};8Kj$^OIh-B!6t* zX$}$jr+!1ATt-2;5;T;DR8)@V=d!hRhf}5Nr9jU}b)ItydwAFn*m{&wpby(E#>2%R z2{i#gqJW-t%=)E7TsoTx2G{}y-XOk=6vIxZXugt;QloaO$eWXQ;Q;o503O(NKT)vb zt^ef%STsR&(-EE`=BA%bdhejj#Ky^=#mIANlo&O%#&$^fq)-wOFZp_Buy46_AiDiYOxV zZgP>4wktndP+dFP^jn#U%rxDGUEme&J8?DY;*LBN5j*gSejec->LokjR07u35bUR@ z4Q%CtW&41FabiC*@kr4FWQXXI4t*ecvPdiTikBqPJ>_LrgMwZc)q3|a%kMKI=1?ajH_X($`=%q&R+pNNVLuDI~*o8AB z+GHO>CjufN4PrWM+bZ4A&fg8?5}AzACbQ=hu}dZH$AEYXj++kaxM}4c0YMW3cOt9Ww z1rDy>Gfk)s4jb3Cj5YN2Bxj{oP?}6{29j-8rY1>}h6zRYUOd;}klS7~hohI;q{w!GQB(`Cv{>jae(#{?N(h3+@7-HI+Qhhr#re0WO>VVok zc99ZG6WmUyOfyYRBmsm3DQq=!#M0;3VfTd$6d2m`$}^N8bHY{66batJd6?~QE@%}m 
z@9BCZRine9zkTX|JPUjozf%kW>SoOyur|)is$<&p#}s4}puGbrN5!|LT`@gHZHI&d zCm-stWLgZ%&ZskskuEAfAtQ+}fbc!qWz8NPkN6N@YS$eoz$v7KBuEh%q4_LWoT7d$ zE!ji}Z#<_oAxU%%7C+LVsRqc5UoM3%Tz;#0vp_i1 zNs_3_iv^;ozsbhwiq>;p=O#`dQcskOFKLBCax{Zu?E;dxpxdWynhJ1YLK02w9mc9q z)%f(qH|&cKVpm5M-v}4#ir*&k>hN1+^&kmaW8y3!7VF&PGXt~yOBa84lytaG?8#zYDphGJYNgj~o@(h7MBauTe?c)fd#~cjNJsU=!=5B| z4sDWj~ZhP`@k z3JNO8r>GQ*X>NXUiAC%vb4si?7;jXq;5027m4m?!7qwLOky^uRsbWQP=+|b#YsO+01kBv3ef_0zuFZ* z4VlLz3Xy1XC^Sy+CmUBB2O5!k96uDE2WH4QfQ@k-h67t22hZ8(DS>(Tq;p(0T%;J) zz;-DIn~p<dCW3_FA4`6I~Z$t+>K1rm|Xnj(%mefeOL1x&nR zqfBqr34wk6F-eB`DCKxyW3@IWIXwuBV$6dt=Ui1_5Rl#qNgQ^axWSDmT(8aSjL}qT zyknuF<}nkHA}gp)+V!S6XxTOVsbdbd2>eqR%A3H{n#C4_iOpLIh)3QkLA`4MsTUby zdiDv4k zz|-2&I$y(T9_MihTeU7y65fF9wCUgpT+Cfk9=jhjru1Um zDVB+<+D#y4*t0`b1@{weyVV^A+o!XB1}V`rtQHvNB_2I1rD&(G@zy5 zR0(#`P;(2(RJAl)1XixtS!2JK9jt98$=lHm6>lI>byu>MtNuGwk0iLJY-3pH*z0$c z5FVQa;h||_8IEYeU8~vxMvoGx$Jd0Ua-rtcP9CId06Rd$zmxKSoo~EVa}P}!4;h9{ z+mi_ColhuC$&~|n-jBF~Xu26KL$&6hS5LwpApbR5*L^fTd9% zxqYl?q-uw~v9K3v{}&S7=m>riFD2j?BZNWG&0ehT3Z)RU6&%5xQDY12e`VC+h9(`GKsJ<{ES=};Tabba1$ z|7q3LEqTvB2Ne-V_KR=~L1R;!hNn_y=8pcfr)DCqD7yGdoNqyF?ZXr6e8HJj5ENp_ z(*#A(I+}>QIHhlnhoCV2xLy3n{MO(dQ@CoR{c6hfa;w+=-)ygBBC_jiwuI8sF((7@ zh&&ulN5H-0LQ?;r#l|If?m?kn*%AxS*v{#%f6xU64v$0fhIILeUwwodk}jmEPj)3%L@o2 zp44|VzjPTXRD{nWbgS?@r^2knn{<>WRE4xYSN?AIaYY|6UJk-4+ zuQb)jiS$=Xea^^xXGaA}lZEG+v9;pp+Q~mHXaoej6EJD2m8-qS9Snm_6kOP}YX{hn zI&VSjm*fKzG|zABwnr`8B_(aHj819dOx@5-OenwPia9uZrJOP8H(|DYiu;pxGD_pY+P)y zV`%|@(IH2*6hSoN>0gL5h3|8cCt@I+;CAQUxiNqg&h&s;w!U#a1-E%J_DR2RR_F<^4t*}&`#DOe+4gb~+BrlJk7XsTwJ9FRMP zPv>f!CP}%`E6M6i6(8Ii!wPmwJALae6LQFa4m=e@P*C(38+uf=MH#KV7*nBhcn6Ip zf4tl0#W!c?1N|*IC*GkAS^Y`#Z(FxZ>a|mFsyA9_z0lO=S8V{3Rc#J-Rf}PQjuYA+ z;<3IcO1J{ph_k9A!F+C!fpL^-!E(z!7bA*19O27UGwyfMzdY|?S4|th;_UHRF}Uku z513Z11?WyFcJmRP=dDBw5N?KusfXH&+{M(SkPMpko*_`4TqI;pEP5n?M&L$}B^~-u z#m`wHJ|o>nUGDvDAmNxff5espw2y3|xc-ttz9E$j35sf8w-Vx+Xm~m+Cl`QngKNpb z5t%?*XzaR7e5U6x#cNK*F}s!c45iS(n{R3sUSXn~`oOF-k;%b~7^)g?TE>{$SXBEG 
z*I%Wb30BH8HUEtRh1EZ4X@BR2e6AOaPcxgsD!4V_dEqaF>6A?ijtyrk$;XyZX@Edd z7H>7*hM7Pl5N(94pX#_h-mp}+4$9??^N0xkoVTGL*wR_QVgN;?`@W@SFLL%g2C|;6 zBfYSK;Z&3Fh+J7dwCAM_!|e+=Ur6|v9fu1h=WnZH&T0K9h)eBzrRE$TV%bDPbKauD z;zl6h?kv3a^tx7vES>??svoi#X%^ZWX|Nx|~i!uc@R+5n4u4FKD8mpZelC zV5eqW)jYTgmRiK%OO~VTCcP4`8{R6DSIU?T55J{LyhTb^RPm{;Y0bQ7DnjXcV=|vg zo+NSqW5&071D_mx5nlPA8i2N)T5I<<3Gqrz05(%}N++`URnMIzS<+*)sIU3C@~==C z<>!8hyV3K~9fd2u``(!>qV9*wvh8v%H9^3vU^LM}gGFwoHo^)CfLGp!ISf})-oPS1 zn?V{Kj!xFG1F)jVMMS9eJ4NSY-X>&sl~q2(JS^{R;){wS?PLXfRD(bHQd*C6r#~?q z^w`~czafF(%}14r7Hw0I9qVYeatY$e_6)FUq58q%v)9KWlg^+nvKr!Vp((7-eYUMb zBAi#}fE3A#iJQiCCPO?zMNGd2DbrnqAm0uj7dMgYom3)~Man2akDhT6_(JI-N@5b1 zK#qZpAI#8ZLYbWacnCf`E0^gLAj;p)!{Lk>g20_>@R0d*OfThWjI-$jZ*bRuLnD!rBpD_e}*mH zU$v;|_OeQMu4HjuZp(5K$hVp!&2IF!w?aSk2M>g9**BPQw2wa=bV%<$C$js%u%bE}(8IVnO1MzQHJ>9JsBPnzpoQoq(+v_6)yUce z!dI!(a?+59kawq7F~IFys#S9g%_`aJm+4&iNHGM&n4j&g@f%$l4#S{58 zVY>1NFiCWoOoAU;z6?l7tsP_t?6q`EV!|$*C@Lbo|jp#H3%lZt>&{!+|R$J)O2RcY)b(;Y%_yxdNsOr5g`JJWd{uX%ooJ3?V;@aK`KL+Ga&;%Ki;hnn}9yyLU^QBMNjNTjdq(T#=K-a)48^i@ zQR+cNniyV9DLULWne?(Q2fMW<@LD-kA^lboreU!^6N(uAZ|AiLf^0)~>HpM3Xy`fy7zf3Zd#nW`?5 z4~K1yC#EI0HN(+j8lYq~>RF1B>;ltp4?~V(%M&fdMElW_d#bCwsnS)w?_79LdB30P zk?gcG-N4C;wvQhVvy4-dBWTYDc7<|!eH|SqNT2+NSRMps|36?>^^EE?oB8-8c;Pc} zp-h^G|00{0mHB@vrg*_b@ktM^Vn0FWD?(o&Px(R9aU?k{i|`!YKxnMTE@(j`F}m(D z_Oz$f+!>ZA!xwg6UFo>%eVyt&Q>Xm+R5 zLKV2P1C4GXiv|dRUl$1_(^|^}8&W9@CetFQ& z<}O4;*fXnCKw6uB5i#YIh=`CUgWd1#|ApUpVlAuU+cT?a$2)>GK#0y6m-d?PCE52} zSt+*M9FXs3d!8Z0Y?L+~RK^!hL1i5vDqj`pBoHt0<+SL8S|HqgqQM}+#h9*JrXkts z^HQJ58*YIKAvhdajk>`7FMjjlt(tk`{=eR?-r{5UtDu|_b8=Ya5dIkNKy{t&QN9So z5~qCXmGxY#o6l@=xMeTyZw^R*^HmY1&x#+?Wf-wSG0A}SxoDFG}UF%y29A1Is zccP0y@haJR&dmWQ2^^=a*>(c+MH`SI-wp0g4Jc$E;HmHDpLCR36iPQd@iw)!O2-5l zUdA;lEJ(F!s1ha%(B&ZoTi%%hghp+tqDfB&P-$jQ6-EqA+B~^t&eE_MF`g(36muoV z`)qz&!j@M>h#vLf&8}hjwU`=Wy`6`f&Gh13lxb|Wbu@a8%<1Y+54cg@Dn#-;$a}T% z(TGMe^!5V0H)iUOh#@&0SBH|;hMv8%iUd2NTUaWE`}eNZ#c3!ydxK4OQUiq01yyh{ zQ}zc@>lR~qJaH=DiKKa(e4KtxuS$fBjnPahV_HNe-#)vCWbqPYv0dWgV(DYU(5HU2 
zUmUA{C@yfF45Mj>flWff37ngPAcLcLr{3P(z>-PBWOez}=V4B)+S2h~WAXh_AP-DW z0$ewyg5C`8YwbcRX8iCzoP*$e7_x@fZ5~V`4i^yhQ3mND>M?vdyiS*ae3M71MGTDO zr?0?-p2%Fo)X1=0DM_U6Tc6wW1F_tF;@HqSRIo!fa#={+0YgxXy97)#YBxyF3%QE3<$`JE;=G zqR3z3f$LU6Jy8rQAmt{mSFo`^KulirJ@7YJQyXcXT6e1j#g9dpu{XxR!p})B917Zv*HN6N*3=6MQmO zHQW)FyxGr!9>Y#Bwwu!@uV1DrBV64#xUQ*q#Li%V2q4QxfkgP$g(SJ>A^6{G=9yYE zAp>OZG__otZ0-3V9b*7RZkM< zA;%OP)dutzrr9ohK<_OP!Q|8P$F&J~#QT(1a2@K!H<8_k$YR2D<3RYv!0r@N>|f(< z0;jR#I%Qv0$q%WBxY$`F)q7D*&GJE^{o^rlQ8>-!$%>P`aSCWe^v`mE5n;x-#(1L6 zJXRJ{W8p32m7WvsHn|ag2HH5-EvIsZ{p%IwJ!sh z^%XXMi_^2u#SsnqUSA|NCA;)}B>eqIfCor`9aY zUVy>=FUz|@&70cuy=~YO0;t@VOnp*XZkYi+kD=Ey0gD|8MI381P%KC=;R#s~1Ddpg ztcx3?Upj^V?r#RE6go1rY!ALb#F|hGY!#X5gC&n)a4vY8EybW(c6%?j4#ojD?P9jq z0%!&JvH+s`0-7oaB~d&s@|x*7dNJC`ErCoJI6pHQt6Q_m;5W|i7R=l}$XD#e>Pe1> zG!6yeZdS8^C`;6TIS0uRa2^8}MMp>}Vc`3wAV6_Qyl6%q1xD1GzLc~vMOETdxYvtX z9wzsW6d)nnnFa{CCoXu3fatVK#R%0g+D?lf#rtyTI_JIfh^5+Np))=y&Zyrp46H{t zWy9AWYs!XI$$#FNU~$P3l&##R425aK4%zpOQdL)P3{E25#*JELn5~wiZHf*wu<1r~ z8^<@Yu+$Kpvods6bfPWSgiLtUWJI8er-&^`U?-t zyk+CfGq_I_TRHswu};piXs=Vj zJdlU1kmM!)Y}nNlcl*iB^1JBq%Vbi56<0mS5-L|5`z9hxgx$YR@(gs|axVP{8_8Mw z3h-S6&Ra|%|BwtM2WMWdkBS;~L#-NAu*T8zN4gyPhW)Vtz~Xp6YEYSS7URj%d4I6j zik<1+;gO0E#-;DsDI&Kt#$5n(u{pzbXc@;`@hXjXch$*78taccnghLG>=VKx;Z1DpAcrc_FpCt*FO9Je zXH$p>aBuY14w>NpyN@Y51|vfh4SHCl#5B!Y@cL&nxqG)ehJTJ~#pg9;=G0#sbq>$K zX2RQVMJ>5L9i>+I34i0 zV7ySp#@`Ckd}C_=?L>Gsp&~t^t>M*M0fsDhs|D!FWm*gH?`b-=Lpc;e^NO{!41D$I zS*y9^=JkKI={3>{=0$h!PY#7A`AClp*Q(bn-GlJO2Fk2+zni!v<}2H)f)JRtHlH5a znOSv@z`H@!=@~$xR{6e<)AQkNzli}U=1PKVZ)T0|FC7XCDuI9a;p0tQp!cos3jruW zI~U1A-WG8L-?h~|5-5Rk7f8j<6p!*fhbV52xnZjOGP|Hx0H)eF{r;0k2v?h+L#^fs zO#xCJa^AxKM>=LYuHwPTyHoZ>CP2ywby-^Q>!3*w%xh$eHTfF>!)ufx<|mLqugApIaBtJFs3{(2;A6n?cE3y$)a!ZCM+_*^&GP zXe|zb(`34ga0TAor8PZ@re@A0#YB}t zO;4c@>Aul?;oF7a0aZ3mVtxIICjuY`LYG5H9q&VHRekb!k+%(iF_lwh_ zuxj_;ec`&5iQU1}bTDB1J&wg9zk_v$?4lqGO-Rf^nYO=$q5yuA-Mvo8IEMM>7t%iq z_g8bvs0}7Py(mNs?^v=uf@j{-gKPClLk~{POh!?RQ070jgKl}iV|3UbbR8Q-I|2|Y zKB 
zhF~3eb90DvzTU4#w3-8j(uOoSjpPRo`tM^m)fIYcj1u%friIMfcAHm5yk!GN_f=?Q z{o?J`eZldnOF%Va{?mo3IER9LfzYw*K@@XLWQ05_BE2e1HB%j)w5$|UsR{{R_gq1W z6^fig?Nmrr<_Q5fGd5`bI(?{BXMUyFtg2efOOg7S?W?R$#I|_tH)}eCfXH*IxAk8H z#1P5-6-R0obN|Z?5t)(zK%PH1g-Y!}X+>xa5i!X2gKIrNO^!cycn`l8j76(4aKkDr zw1Eg(s-zQNgt#K@=}F!frQVE+0I$VOMx(d|p*y00Yg#6rRS(r4caCf^Vpa&A2%f^J zi+$?fIHDNdlqdZf_-`Kj7d&&+V;dLc7^L#ZF~OmnN{4zT?rq!}0Sk%O!%V=M*Te6a zyg_!64(fh+_^pfLA-?m8+wD$Trz$qdRXaTJIpN%|C$g<)QISloBNQRuIkO9IedIOQ zbZ&l$BFkvN$h@Evn#F0CwQqE6%;~&pN%t)_mfmNj#O$h)BYbj%=02V%3DUd-4QYWe zF-x18RX3~~gqy8&@WUjzn52t18%zx^HMJq=tq$B!mc+(-4r;85z@zY#b?innP4_fV zfDj8>Wy2KsK!E<`8+WL~d8Zz&Pz7b%YfXUBzXqilh+V8uPGfgBtsAjL6Nx}ERqSDM z1%lEs-!P^d09H*DbpGygkC537R*@ zMc^{K(Hfw=oL?0ZR0_{gY{;|ET`i3>K@22oEZaYOA6xHCctmYsB`@AHE6R)Fo?+q- zp(u`k7!qRtQzlXF2k%|FE2hCp(mXyGqTiG`72Fj6i=Y8SCmm?XqUQkIa4HXamGer9 zl@dUrn;sCVF=uAT>ZiptX0I5>#XYF3s9?mkQ2ZW#dS2cwQV}~$e(;Kb6XMBjQ;mMv zU>mEw_(1x3cwPUCm8D)jR_+nFfHFWw#mjpc3giC<5cue|-O~gW3fKH5{1L-h7qtb2THh?0>j0 zzk>Vys6`9%Oa9c~{w+Sj`NFj69jq!mAvEwXqh`k0gUPjiGNp0;TvYc*8fgPIleeM= zx+2Y0T#mO2II|Bctb2gRKrIXgPr6dC;{^`j$h%DT7SbDO(MN!T{SOzIr$l==q~R4p zDo%Y=y4|=v3w)N+5ZW&9MqZX{ObT&qH&L6|ts8jNz(acTKdFRYj88CNzAP^LRw%x;&ky2{(Fs=1s+&8JUQH?S<* zM&=&YYX=tX!-glGk4z+^&L%RNqB}MK5Rrfm4$S$=dPx#*0emycekc!Fw<0l5Edwxt zM;^%*zYi!|Kefvch#Xwj-r`8>(m7gm94gAm&PnzPR49PzDVqOZ)QI#9R4uN27%N|#1DMXSG} zMeNHDRWB40xT;qG$%8siB9wwuVEZV(>#w5N%hY@LcC6F@hTVL@ZfZZEJUy!b@2pri z=ar^@^YhO2;{b%F@I#SS0Enn-Z;ZX=5^AIb>##9=_4s7y*}1k8l~-c2mrY@80#^lmlY=x*g(adyK^igY)xNR|@k_&h^5c^`V<} zP1YQJe|HY$%rw?}fdOXd8)&jr8J$r&qKEG6&KvInaJ}mze!nfqsk+iFSJ$N7NhhNS zg{<1H!TDBmc*SKR|?o|TyO*kD>X&U8jMq1S_kWP!v9pUh#4^7DE( zB?kFB`Lw8S#s4vJi=*%rO$AkeC5f_`fw%OKrCp2cX$1+ly#S(I*(Sm0e2rq7RE*At zD+I1hdB0^VU%s%AOeNZ@J22V+q40Iwn=+S1v3v}5F{22=f&#XVG%AZ?Dw}`xl3xzd zil4hvF#8eg{bH)<=HBW{TaMO^F|o8bVe3&byYq_mSBBP=(4@ejlRvm!v!va|_MV(A@;s-V8RVZT3wb+x+DU&8o_C#>9;r1xomP^X4fgF`Yi%n3u z_p_}`=DCQ*#jz4PFOzEyePpvj0FzAw3EZd1a9spQ-6pavd@$0G`k6Ji$e)#)a;opGovGIi 
z;CuemG%+Fw<>HtIj{EN=_%04)CYZ^QMB9BsJyc%2KpvJj^-M~=luhT4mPpw3=vN`c z{5YqwX3b5B_FG^b=7egcg*-@21V zW`XvG%RV8na_1uy=qM*>pV?(uR|FR5{@^jTU6x2jlWmDtz7+;F{q{6O-s4_3H9dbE zp%qt+<7qYtC7l{xfksx~l~b~3`lEXlZi5~SJWnw_=PIaZyC&+0W!0fMASmvmzR;a_ zj(G8IOYK!Zu4MYEaxL&SK@C&TAJ%3)*29}Z;RnOJkG_7jTZ8kw2xode_MLQ;N}@3u zYASV1xL%;I-guq^t)~BzIHym`1QYVCn0+e9VA}Xh-rpX8#rash=$i8jNi}?7s1h$i z)6Q8^w^zDZzqq-V@Q~cGGH-)?80Z|e`ie4P)z~%KTucXTVe$iW1(IklK?3>W_fXQ3 zt}u2glO2IPR&fNYBQ-7ZgfNFOrw*Hbq!b1KO9g&1XzhO! zqe-wh6Kks8WVf*KW*?NYdcevd#TbIf|MRdPi!gC5d(LePNjGuYkK|KGb5`?jRv>p8 zoBEmsd$&JqNX<=nL`KtpXqe@HdqsT=;6%5euV!dpJ*Y%J_R`)ZF>3MNQt#clyJnRu zL+3NRvIIz|-8EW6!xBxP95EkXyjiZ)rp51m=WVhKQAwxwb!_-z|L-miX_9Hx^d0`v{_aTJ{?A$BWiIzz3be0F(JlDunPuW8WYvRDv* zumdWsn%SsMdh{#gcUDA@+ZLUESB?rh^DK2)q2{{mV;VW+2C8l*1k z7iRKV#GCTRI67A}51-t=z2X>bisAM&Ub85??ALRaZ*_l!%ylA|`E$OZL(c~wA;E!{ zT!YJ9bJWc?46In@QDIl@uTkdx3C#7uJQWDy?k^1=Rs_Y+eYh;`2u~Aecj$TfLV@sU zQm3yHu=!b$jjrQ3Sr>r&nYzsh?R1fy3=*7TAk!JP4E7=1N*4&ArM0x`4MXQ>UO)uC zy(MjCQbN$MB+v;QM;sy7A{C8zzQrBI36snrHw(ve{Oe9*?#<1+E2%{NyOTg0lRh3iK&MB^qi}L*Ln2M zxOK)y4xiu__CxmC$dZ4<N-z)`YPJVeaVt%kM_B{0fnxQjd(n|+ zqJLo}6tsb4jFE8)9mh8$=SaIK2BnCP3T7)$P5uhExK5t;`aDf+lxOdWNOB(c(pHu; zZ(3WgP8ezZUM|J2&HCC3Kzq8-){HMw?LAE~5}ffXrgxY!Yw)$?uh8Qt(4dr5phWee z>@zOttuk#p=~*td>Dw(bk=F({O06Z+ma2pDO0^CD2+v=K5UF4u=n5t^OHKIuHl^r~~?E0u#=bs(f-i*5HXtl17ww_d}1Dof5)!1ewxY&R5(uVha=Q@DV5 z%{nPz+%oi1xaehBg*G$U!{^xVO}njzOth$?o3e5^rIK@5y%0RaO>C$LV1WalmypIn z$w>{K+E&EpZ6Fk*@F>EgF}1flY2$)%NTTpg$u@Bw+_`t=vdq0OQNK5L;oS8Cufud0 z-JX?6w=?9~x8_WyW9+KcK*|{yIggzo0lsjMQ&d1VvRHeB){@(1ot@5Vh-Vkb3vu{oWg^qh7^0 z8?>;rJn`c4%gR$9?kOKKw_v1=VO*jz#t8N7>QVzcLB?!zg}wx_~@q zpz+mbHIKAPqEl0zGR9*txD3X{-zIl3=_Uk|@y-=LxD#B{t=eMm9vrSY-ktP;oZ49I+ zFqb2s+2oOnz8^F>p9nXmmXF#Es#r?BuA9bPg%}u_X5;94#|mo5yW5nAeoGo54={}+ z|36tp-@8_LEbuYJK#d>%M*u*6zt!)xHNe^GWAs23ChAM664h1?;>9LdtXnC8cASgY zCsX9P>oXFVfjOB@k8W7Da9h!or`czOWV#n*=UF5K(#lrX(XHx|LDx<3jCqw+-ML#H zuUL_0C23~_R#r^3e2_aA8(3iX=Y^umwU<#L)FPzqHpo4EvNV3bRV_iF7oI_#zS3S2 
zVFI>u&dw|Dbo`!KU$E-{^a`REg(t{<<{wlM>Ik&tSR|$M&`)t}U?=V8T@*9tA@1HLq!-B+O3mKYsmG?V z5l;6n08Yp_3mq#XCctXE#RvM0g5A=hM1kRSxHKb1`wcC#2lh!H-wd4oxeIO3AP;=R zWwxiJG3genQV~%QLJ~yK!#>ZihGE*&spHFJjc)H%xvWuWElq$=BoDl*={9!_-?qAv-ua@l;Fux#^JZ#1@IoHK6ClAr%^rq<3q8?`hWau!dh&>8W z-viC0GR!ykkYJ&;s-~aeKQl}XbSK^M0FF37**8QZnOYN` zzI!f!V#jxRqQ!Hk!(D4xm5;YS^VOT^YSNA z;g>UNo^yn4NSNrJ5XyP&D4aZu_hL*qKg1?@@3A5iU~V?D_dNsHb}rS!i&sdeH>9nvPSc9)7$~Mx2jX)ph{O9z z^=iVd=!1aoC%u>9^#4uB=a9!RK02o{$214eFOmuL$M3UY*ce)K|hUP~W*F)RJ`HB}4v_b|s zV4~Xhntno&Lwj`=wZM3<-Fd~vPZPDoTiGJpET zp}zhMJptr6<0Bv#_cUP|p|k(nuKJG>egGQaIoo(uA_hap>i{ML>nPkkqZ!!KI(O+v z6t0P0Pjvx)4}G0?8C8!fZVE)GO=J$0nfd3swFYo&z50%3PI3iPx|h0?)+;XS)Mizp zwkmZ&S@a3-+FSFPLi%)AYS}_vC+}@PfIa#(^vuLzvvxpobzq_BSqOtPm2bT?<2*Sy zR2CAvAKzRR@90Q1fqvxk^!Z9bg6T?T?Lw$EBv{JYtjsM0&!S_4#F7CZ)ygLX#CeZb zjgDR(QaCY{`INW&`b&%qf%6ZtI$-6R!Jo>MjYSgty70T?K3qH(hvh!$^|k)vUBJK> zf$xJJ{K_jeHZ^$pONb}~G~amnLRMJo)T0V~96u3e|WrokbvjDQprj`9#qI9IDPpHwkS&&|8K%?pC+Z{zAb6 z0{lP5+t;#Ac!J6sQ2EFd^-xNarO}!k*gP=z!P!74Xe;x4*rBHWcX+6QliYlk zT1fOY^WtThL9EhJsi)~P(EkQRCbjC?vq@t&OY(~;#v8z8#{ZJvt_wlvgKR8vtm?M$ z4JztmDxJim92OB#)K*`Se0r_wHH0xC1G-@qVe4cH5#{jJKFFeQLp;)H|7GAFGJ;K- zldP2_a$GA}Xdm0kR`wqmuXY~-?6_h5Q%Xg4Fvhk_#oXw`!~_lkspL>RCHw?EZK?FQA+{7 zA{7CLw=DE1sG3_Jzx`Ur!ha?F*d2-pufye);!4^q2D0f|5Ow)Tn2k=T9fj@JH#DbC z<>)T_f-Eky<8?cEs>e3V#avr67efCyrbI*W?zUF zdGgAIJ0HcmUdSkWk`@WyEX_;bk96M~^jRW52ep|;-TXkj-uLUW6>Z-03Op6vwBtk! 
z18;|;0NcLNa+K>55 zvMrKlii}bd9pnOiGyyZ)P>t6#qgMyNWQwaSgGlI%cIwh5Q~KSAvJdk}V%%aokGM`@ zPM#rZ)>13Hg~lvpq0XSGSstATMF&Aq{h+1^hVk?4wcMm7N4bn>3roM-@;xoR#z z0y5QSmz-@`7Aj5de#2B+HYPIJq@Qc6HO&0_u(SEm zCqz^8N$R;(%2F(uMWZpTGsGW$t9})Oe#NufN+ZP~Z_-Oa5+5dbj~k9B55=tWl_H3# zCq`#tfxnmzRT|^3B^bVrc0sdq@<2ka(eI4GkD*`<*iE^|!4cbKCy@1-!Q-HyKmzub2%?}LgrI^FW{=ZJB%i(3u$Bv@X+71Qq^^9^1zJ#RJdo<5 zkxS<`y81oK+31D{oBVBXmnN-^c#5F zRF{$#!@v4H^&U^ZbnP(J3IhopIIcX2;G!|+TkKU-daRW+#Y@3mZI2%jkxB&{pR+<$ zzh_V`Kg+C_M?cx#+WWP1U$ThMQj1NUtujakcWyITq38Tw;845jV^UU1Im+GycJeGA zpz|Z6k2&Qu0simB09Wy5y?zp`8v9cW0~sTs_IULcV>1MD)1*cKK|mc2ov;rMmRk1N z54&g*&UM^K%3N?5*7c+@b!$u%90v~zxRkS;RJI_j@Pve(`Q*rmrE+=4^8Zx4_Ad`~wUuP#Ti;J|2D6 zna(s0Btd%+cfRjO#-BKzuPuXdWli1&{s%|3nU?v6<*L@lGza

m*~n%Q+jHE>&Cd ziLScjSBD^F?MG;6K>z|MJjTkY5R09p0t3N`zsu(Xx>8<0!r@{snzO7TGby?%)w~`e zEjG;aDTjwS-~Iey|9|oMXRFs%fe=DP#QE7|*nKHk{J+;@urZl)G~8N%G$x*ykN0EOns?>ftz3Evf|ghaF68@@ow# z4PnhDO6uL-{aI&_VHeBx-koyl=ltT0CLA3=%0B zneU=gvr4aHmMp2?|96yt_*<4r{*@)F;|%*>PKi9M5iJ>2B_K^nek$T@7??gp?X^*R z(7EGiVm4g-HJxxO&x#VS+A`oW%w4+I-24#ajyR4XBJBsfLUb?1_StpqPBF?eL6fAb z5P9(1z1bN1!$srNPHsGnq%-~@%a9t@z2lFrE7ReKTidSBOjYCTVqSP_UQk4(0AKae zvWvXMV@EiL)Gc`nc?F9RGRrH+Ve}C~E+)>V`Ki8v9C~{AZ>Dd*4c--h(okIBrt9Bp z<}r~?C?I!KcAi~C63c_*!|ALU! znN}}gkP|GdSyGhj8m#*{fiqEf>i9z|L)R##Da83dhCxeN+3(AF?1RbD68?XxojW(- z(8Y&Jq}dKPhxeM^tx8&Y3bIw;eppzn8`oHzTZ7+6$eFBe>P&ahvO!uxwKn1BB{sCG zr&bXFQtPsOOnnnjYDSR|k1qg^y_HNjm}D_8bLl|>>7bv4@-i(IEQx1bVgwLg1{8+# zLD4wSrjfrvj71!~Nx?UFc<<1-(_o{5zBx#~OyQCk<5#FSAuP-o2~Q1725Ra{AVLvZYG`!n&qUTq$fYa#y#~ zRz@?8`us7xmOacwp11FNFjOp+EL-D}i{A2@@+_9s4^`|r?g_n(qjLO3IV<^Mu~oKu z08qNrMNcK^I^;njXYc8eh<$gLThAQ}UgSm9BI#@DioQ@od-;MwrOb!$FKW!DM9W3v|mJfWJL;OKhQE zYN6xzE>;$1QV-Xd6Zvn{ZZd?9oP}=@z1i@I;C{_%6|PL=R(%SMwWNMA|A)7_RtRVj z5GMITMtlPxT*dN{@}-Ld>ElXzOaJnI(u{J*uJ@bkufISVEi#*|4_~*} z%9S@Z7y3y4Vz*vr9>FZz_=}gTMlPHxUt~}2GS@4;7F1-4~HX%)oDca zD#6@*rTc5G=?Rw%ck?v8Qy))8YxIbaOJ>B4n-NL?UM_xF%>6+v6koYWee1$mog>o^ z?mNY08?T$uv}8PC%(z7pbc@E8AzA!8?Z)YB{@4c<5*=ISK64K{Ru2Q;rP)61EwB;> z&{G$FZNaM;CJDKnROoem#S8(e;#P6?E`c3Bstq?2{Dl2IIp_LKQu&@o8YR$kF+b)P zO9hnV+EtIk3|GY<1dH96$UZshaKv;Ik$y5)R_-i}(*6aZAuzZBy5G-`l*}1|k?y)7 z_f|w1D*Uw}R)41yO$TD-qY$%yIJNt1NrveEbm6PnKXlu&+`yd!^nWLxfHtuUMTyb% zuE;FI=;FchZu(mk?U_I>D zOm;qt*D5|Ldhyew4iQOusgO`IfAbScBFZI10PeYD89e+RUM{;Qz;&0_W0jTC`m(ajj zbKf3;b!?^O;>K$+&-0O2nnni6bmqyishw?juW8Z;Annw-J^0kd?~|NKs2^T zmez0B>hMR+O(ck)D#7!6qJ;vbU(%()su%4kd9W>HRosw)jRmll0>~+8MJ0(Y7halhx=8#V#Iyrr}soG#5AbG`a`o0NU_^s`##kQYPOi@=B zK9NeS_B$~tnYXu(3w4^*>8SF_c$3Llck%6*}MyFh#5^!6D&~dmnd4bt^#V3b*kxfJS((`ZN zNxM;oABf)L*^JteCa>}~o*NtB2+%XktU%IaOmT>xj8=hC3AxHMARa=hNV&XC;BoRa zbbRF%Q{l2(*T&!FvhbZ6?+$plUvFkOW`IjsXYr zJ3D^iNO`bUJv+1H6<$Dn4`caclui~HFt5ZVM=(S1vg>-&iFZ2+l6d}U)>KAnfQBZT 
z7*@+-$0R39f0s*Tr~J2e(Rms`d&+xlGG9QakaqYz00s?a?sLb|;KMLyb4CFb4NhP? z^dphdB0j9WKF&tbK02PvFFj_F3Hg1_Nu#QhrbUSvt08QJ)d%3uZC4`DY=~@?Cm9Z$ zh*S!JEu#LUwShOv5L^Xj_WEqV^liCftzXHi^4qrZZ?ybdY>L&xh4zaH>eGJZS0vGs zr75E!oeAFsxqu$UCl-l{Ne!x2wH(lt_+$!Hr+i@$$}lVm8K%%vFx{#Yg^|aK{m>p< z5S##e@@{*{%$K`14xEkuSDbs_7LW7BGlSLq2+)y8Z!ZlW_8mrA>2dcJ9@6xoc+UrW zySoOr-U%E_P&AiK)^8$`%iflzA*hzqhKEug68%`vvGTKH(vJwo^!-tGa(pvv0sRpI z?vm2;t&(&dCmc=@C3l+ICimv@Fbj812Ljo!2cvFtsHCD{VuB6y)Rw_6`MwYJT)vdQ z%m;VVK>{kHOOm%k9F*&$iiBm4=5mSdtf5h!t0@`(|M#V+m2fL5G?L=cAKw@;aqhMa zJ?$tueq^K&IbK|=O6e4K%|hen$fbCfnF;YqaLfDUYIc8i?)1Q1yTvM5UB@Cw`tx(= zF$ZAcQ)D{CKsJ9j<09!+I`Ag3W{jD`1=IYrwYgtaZh=(}!wtZZB376_BOE8M2~I9D z*}v@36Zr%%jm}B9w;BgDsoL{8H2?7>c>V<%h0~k_yQCrxkTpkid(90eJFHyN+Q1j^ z`$$?N`-o91+~k%oDV9C_x3vCG>-cW9s{o#D$^t;3SLj!%URycN;nU)ZQ>}9y@Xe_SmUt~_gILwtN!5?|cLXt1ZS;c^-!`+VSPOUPM=c8{ zENNW%rWo&D6q5*)-Yxu5O2PSf;u0J3s9ph=UR#YH)Y-4~cZxWF&L%qe_;C35+Eo1< zGY0jH7jg2pzj@Ag9xJ6BL7U!_$B&llHCdMvNbvLW%78I$0bR$4-^i!&8{298r*@n- zDCRuvmQm<3jyPAmc#vlI75?C_UY@-jx?TEmy9~jF!qzR1jvi+>mXgd86QeP&X}G!; zIBozA(~eOY!iL@mp+ z0%k53)vH@|_0|l3LR?9rn3Og_FX>`{VaM|DVRp!gMM~NGegc8Hx>?edMQ_T+PH!5k zL;dGVp1K93B3>%GRFDWh!F0nPcRB4R8)J_PS;M43n#M&;#0tX6BGHR4Oc%ehl8K;S#6~9*#JMad-imCZTi0p7plsb3@LdE zGMW}d#!3Q=nC8TU^`s&pIz4R&h7Wa@vf%kDO2uXXn73JK+esCxwe=)<8rrfR0f!Kj zuE?Hi!mY}ApVHVX8e+*$lu{FZ*Y5aLE_Vs&>7liz4iQ6nnO9d`sMb@islMlVO%v?v zFEsieIZhuaq)rr!QN2E&gzE>@VU7evpPY_o0bH;f0I$O&s820Y)lF3b12tAFUnor~NXiU5TX!0*IoiPOrM3rFETmS0)Z~y~z)f+9 z4sWoC7pc*#=;-xjG{l-*wTz=J?`6l)46uy-FyC_dAq$s3k2&vW1q%D5+jEI`!$>DU zg3W3)h*GY%n zE3rbSOKZwr^iU8JP(Cz*r3L*5Vk$icSP{d`Z7?RGLAAouy~8OO`9pgDc%Cuj#;b;$ zi!;s^X*=`SlJF_QE2zUX$r~CpC4F|`Ch*|&0&Hs$lkK>lXnpHIP{G{`Y;!<3#mcAN z7l@<6*S81NBYrw^VQsg0AWR+g>Eo&0hN82XnX@5++#C8w3OqRo?AlpyeLC)B&V4VF zWeAH|jqlj9m4Q5l5kctC{1>P^>NNz5EOr=jO<~^a=YO?Y^!or5?`zF;uYjk#7~uB& z5e4$39f6?1R6&p^|D1(K=H}^_n`N#$LL{b*cS(wkST%Bb{5=Re{zdrFhC9F$PG|a2 z^7o)$`giybWTg!tvbOfUZ-2VchS9w!rnv@vpWf8gk>Yf-r-B1xIalZEQD_=xT&2@T 
z#z;({52=}4h$}B3_KfVHffN6UM-qx^wnPR$W0Ir>q)((SOIP^`ebSv<7!Yh|aWv_U zX7C;rvk7Qux}|MijB95WR7R5-zu-%_yah$gkw$zLsw7?Tf@oCrL~TU*RyGq_hvq-Q zRA5tNi?7iZlpJl0s@t!>+TBXPjsJS$?`v)^2zcR5-r(8ZU!;(V&HRfDn!)q#f()id z16MwJd@7WZ=$sE1c1r9$)DQhH=(!hYDoF6S4=~=5+`la|-Q}QiXp4&) zR

+ * For example: + *

+ *
{@code
+     * LhaArchiveInputStream s = LhaArchiveInputStream.builder()
+     *   .setInputStream(archiveInputStream)
+     *   .setCharset(StandardCharsets.UTF_8)
+     *   .get();}
+     * 
+ */ + // @formatter:on + public static class Builder { + + /** + * The InputStream to read the archive data from. + */ + private InputStream inputStream; + + /** + * The default Charset. + */ + private Charset charsetDefault = StandardCharsets.US_ASCII; + + /** + * The Charset, defaults to {@link StandardCharsets#US_ASCII}. + */ + private Charset charset = charsetDefault; + + /** + * The file separator char, defaults to {@link File#separatorChar}. + */ + private char fileSeparatorChar = File.separatorChar; + + /** + * Constructs a new instance. + */ + private Builder() { + // empty + } + + /** + * Gets a new LhaArchiveInputStream. + * + * @return a new LhaArchiveInputStream. + */ + public LhaArchiveInputStream get() { + return new LhaArchiveInputStream(this.inputStream, this.charset, this.fileSeparatorChar); + } + + /** + * Sets the InputStream to read the archive data from. + * + * @param inputStream the InputStream. + * @return {@code this} instance. + */ + public Builder setInputStream(final InputStream inputStream) { + this.inputStream = inputStream; + return this; + } + + /** + * Sets the Charset. + * + * @param charset the Charset, null resets to the default {@link StandardCharsets#US_ASCII}. + * @return {@code this} instance. + */ + public Builder setCharset(final Charset charset) { + this.charset = Charsets.toCharset(charset, charsetDefault); + return this; + } + + /** + * Sets the file separator char. Package private for testing. + * + * @param fileSeparatorChar the file separator char, defaults to {@link File#separatorChar}. + * @return {@code this} instance. 
+ */ + Builder setFileSeparatorChar(final char fileSeparatorChar) { + this.fileSeparatorChar = fileSeparatorChar; + return this; + } + } + // Fields that are the same across all header levels private static final int HEADER_GENERIC_MINIMUM_HEADER_LENGTH = 22; private static final int HEADER_GENERIC_OFFSET_COMPRESSION_METHOD = 2; @@ -110,33 +200,16 @@ public class LhaArchiveInputStream extends ArchiveInputStream { private InputStream currentDecompressedStream; /** - * Constructs the LhaArchiveInputStream, taking ownership of the inputStream that is passed in. + * Creates a new builder. * - * @param inputStream the underlying stream, whose ownership is taken + * @return a new builder. */ - public LhaArchiveInputStream(final InputStream inputStream) { - this(inputStream, null); + public static Builder builder() { + return new Builder(); } - /** - * Constructs the LhaArchiveInputStream, taking ownership of the inputStream that is passed in. - * - * @param inputStream the underlying stream, whose ownership is taken - * @param charsetName the charset used for file names in the archive. May be {@code null} to use US-ASCII as default. - */ - public LhaArchiveInputStream(final InputStream inputStream, final String charsetName) { - this(inputStream, charsetName, File.separatorChar); - } - - /** - * Constructs the LhaArchiveInputStream, taking ownership of the inputStream that is passed in. - * - * @param inputStream the underlying stream, whose ownership is taken - * @param charsetName the charset used for file names in the archive. May be {@code null} to use US-ASCII as default. - * @param fileSeparatorChar the character used to separate file path elements - */ - LhaArchiveInputStream(final InputStream inputStream, final String charsetName, final char fileSeparatorChar) { - super(inputStream, charsetName == null ? 
StandardCharsets.US_ASCII.name() : charsetName); + private LhaArchiveInputStream(final InputStream inputStream, final Charset charset, final char fileSeparatorChar) { + super(inputStream, charset); this.fileSeparatorChar = fileSeparatorChar; } diff --git a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java index 7ae9f5496e6..c3246516f8b 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java @@ -97,7 +97,7 @@ public String toString() { */ private static final String ARJ_DEFAULT; private static final String DUMP_DEFAULT; - private static final String LHA_DEFAULT = getCharsetName(new LhaArchiveInputStream(null)); + private static final String LHA_DEFAULT = getCharsetName(LhaArchiveInputStream.builder().get()); private static final String ZIP_DEFAULT = getCharsetName(new ZipArchiveInputStream(null)); private static final String CPIO_DEFAULT = getCharsetName(new CpioArchiveInputStream(null)); private static final String TAR_DEFAULT = getCharsetName(new TarArchiveInputStream(null)); diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java index 04f8f14429a..8170df5ea83 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -29,9 +29,11 @@ import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; +import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; import java.time.ZoneId; import java.time.ZoneOffset; import 
java.time.ZonedDateTime; @@ -104,7 +106,7 @@ class LhaArchiveInputStreamTest extends AbstractTest { void testInvalidHeaderLevelLength() throws IOException { final byte[] data = new byte[] { 0x04, 0x00, 0x00, 0x00, 0x00, 0x00 }; - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header length"); } catch (ArchiveException e) { @@ -118,7 +120,7 @@ void testInvalidHeaderLevel() throws IOException { data[20] = 4; // Change the header level to an invalid value - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header level"); } catch (ArchiveException e) { @@ -133,7 +135,7 @@ void testUnsupportedCompressionMethod() throws IOException { data[1] = (byte) 0x9c; // Change the header checksum data[5] = 'a'; // Change the compression method to an unsupported value - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); assertEquals("-lha-", entry.getCompressionMethod()); @@ -153,7 +155,7 @@ void testUnsupportedCompressionMethod() throws IOException { void testReadDataBeforeEntry() throws IOException { final byte[] data = toByteArray(VALID_HEADER_LEVEL_0_FILE); - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new 
ByteArrayInputStream(data)).get()) { try { IOUtils.toByteArray(archive); fail("Expected IllegalStateException for reading data before entry"); @@ -165,7 +167,10 @@ void testReadDataBeforeEntry() throws IOException { @Test void testParseHeaderLevel0File() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE)))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE))) + .get()) { + // Entry should be parsed correctly final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); @@ -191,7 +196,11 @@ void testParseHeaderLevel0File() throws IOException { @Test void testParseHeaderLevel0FileMacosUtf8() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MACOS_UTF8)), "UTF-8")) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MACOS_UTF8))) + .setCharset(StandardCharsets.UTF_8) + .get()) { + // Entry name should be parsed correctly final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); @@ -204,8 +213,11 @@ void testParseHeaderLevel0FileMacosUtf8() throws IOException { @Test void testParseHeaderLevel0FileMsdosIso88591() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MSDOS_ISO8859_1)), - "ISO-8859-1")) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MSDOS_ISO8859_1))) + .setCharset(StandardCharsets.ISO_8859_1) + .get()) { + // Entry name should be parsed correctly final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); @@ 
-218,7 +230,10 @@ void testParseHeaderLevel0FileMsdosIso88591() throws IOException { @Test void testParseHeaderLevel0FileMsdosIso88591DefaultEncoding() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MSDOS_ISO8859_1)))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_0_FILE_MSDOS_ISO8859_1))) + .get()) { + // First entry should be with replacement characters for unsupported characters final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); @@ -235,7 +250,7 @@ void testInvalidHeaderLevel0Length() throws IOException { data[0] = 0x10; // Change the first byte to an invalid length - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header length"); } catch (ArchiveException e) { @@ -249,7 +264,7 @@ void testInvalidHeaderLevel0Checksum() throws IOException { data[1] = 0x55; // Change the second byte to an invalid header checksum - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header checksum"); } catch (ArchiveException e) { @@ -260,7 +275,11 @@ void testInvalidHeaderLevel0Checksum() throws IOException { @Test void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { // The lha file was generated by LHa for UNIX version 1.14i-ac20211125 for Macos - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0.lha"), null, '/')) { + 
try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newInputStream("test-macos-l0.lha")) + .setFileSeparatorChar('/') + .get()) { + LhaArchiveEntry entry; // Check directory entry @@ -361,7 +380,10 @@ void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { @Test void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { // The lha file was generated by LHA32 v2.67.00 for Windows - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-msdos-l0.lha"), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newInputStream("test-msdos-l0.lha")) + .setFileSeparatorChar('/') + .get()) { LhaArchiveEntry entry; // Check directory entry @@ -461,7 +483,10 @@ void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { @Test void testParseHeaderLevel1File() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_1_FILE)))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_1_FILE))) + .get()) { + // Entry should be parsed correctly final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); @@ -488,8 +513,11 @@ void testParseHeaderLevel1File() throws IOException { @Test void testParseHeaderLevel1FileMsdosChecksumAndCrc() throws IOException { // The lha file was generated by LHA32 v2.67.00 for Windows - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream( - toByteArray(VALID_HEADER_LEVEL_1_FILE_MSDOS_WITH_CHECKSUM_AND_CRC)), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_1_FILE_MSDOS_WITH_CHECKSUM_AND_CRC))) + .setFileSeparatorChar('/') + .get()) { + LhaArchiveEntry entry; // Check directory 
entry @@ -539,7 +567,7 @@ void testInvalidHeaderLevel1Length() throws IOException { data[0] = 0x10; // Change the first byte to an invalid length - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header length"); } catch (ArchiveException e) { @@ -553,7 +581,7 @@ void testInvalidHeaderLevel1Checksum() throws IOException { data[1] = 0x55; // Change the second byte to an invalid header checksum - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header checksum"); } catch (ArchiveException e) { @@ -569,7 +597,7 @@ void testInvalidHeaderLevel1Crc() throws IOException { data[41] = 0x33; data[42] = 0x22; - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header checksum"); } catch (ArchiveException e) { @@ -580,7 +608,11 @@ void testInvalidHeaderLevel1Crc() throws IOException { @Test void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { // The lha file was generated by LHa for UNIX version 1.14i-ac20211125 for Macos - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l1.lha"), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newInputStream("test-macos-l1.lha")) + .setFileSeparatorChar('/') + .get()) { + LhaArchiveEntry entry; // Check directory 
entry @@ -681,7 +713,11 @@ void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { @Test void testParseHeaderLevel1FileWithFoldersMsdos() throws IOException { // The lha file was generated by LHA32 v2.67.00 for Windows - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-msdos-l1.lha"), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newInputStream("test-msdos-l1.lha")) + .setFileSeparatorChar('/') + .get()) { + LhaArchiveEntry entry; // Check directory entry @@ -781,7 +817,11 @@ void testParseHeaderLevel1FileWithFoldersMsdos() throws IOException { @Test void testParseHeaderLevel2File() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_2_FILE)), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(new ByteArrayInputStream(toByteArray(VALID_HEADER_LEVEL_2_FILE))) + .setFileSeparatorChar('/') + .get()) { + // Entry should be parsed correctly final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); @@ -811,7 +851,7 @@ void testInvalidHeaderLevel2Length() throws IOException { data[0] = 0x10; // Change the first byte to an invalid length - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header length"); } catch (ArchiveException e) { @@ -827,7 +867,7 @@ void testInvalidHeaderLevel2Checksum() throws IOException { data[27] = 0x33; data[28] = 0x22; - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(data))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { 
archive.getNextEntry(); fail("Expected ArchiveException for invalid header checksum"); } catch (ArchiveException e) { @@ -838,7 +878,11 @@ void testInvalidHeaderLevel2Checksum() throws IOException { @Test void testParseHeaderLevel2FileWithFoldersAmiga() throws IOException { // The lha file was generated by LhA 2.15 on Amiga - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-amiga-l2.lha"), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newInputStream("test-amiga-l2.lha")) + .setFileSeparatorChar('/') + .get()) { + LhaArchiveEntry entry; // No -lhd- directory entries in Amiga LHA files, so we expect only file entries @@ -887,7 +931,11 @@ void testParseHeaderLevel2FileWithFoldersAmiga() throws IOException { @Test void testParseHeaderLevel2FileWithFoldersMacos() throws IOException { // The lha file was generated by LHa for UNIX version 1.14i-ac20211125 for Macos - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l2.lha"), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newInputStream("test-macos-l2.lha")) + .setFileSeparatorChar('/') + .get()) { + LhaArchiveEntry entry; // Check directory entry @@ -988,7 +1036,11 @@ void testParseHeaderLevel2FileWithFoldersMacos() throws IOException { @Test void testParseHeaderLevel2FileWithFoldersMsdos() throws IOException { // The lha file was generated by LHA32 v2.67.00 for Windows - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-msdos-l2.lha"), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newInputStream("test-msdos-l2.lha")) + .setFileSeparatorChar('/') + .get()) { + LhaArchiveEntry entry; // Check directory entry @@ -1089,7 +1141,7 @@ void testParseHeaderLevel2FileWithFoldersMsdos() throws IOException { @Test void 
testParseHeaderLevel2FileWithMsdosAttributes() throws IOException { // The lha file was generated by LHA32 v2.67.00 for Windows - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-msdos-l2-attrib.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-msdos-l2-attrib.lha")).get()) { // Check file entry final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); @@ -1115,7 +1167,7 @@ void testParseHeaderLevel2FileWithMsdosAttributes() throws IOException { @Test void testParseExtendedHeaderCommon() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x00, 0x22, 0x33, 0x00, 0x00), entryBuilder); assertEquals(0x3322, entryBuilder.get().getHeaderCrc()); @@ -1124,7 +1176,7 @@ void testParseExtendedHeaderCommon() throws IOException { @Test void testParseExtendedHeaderFilename() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); assertEquals("test.txt", entryBuilder.get().getName()); @@ -1133,7 +1185,11 @@ void testParseExtendedHeaderFilename() throws IOException { @Test void testParseExtendedHeaderDirectoryName() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '/')) { + try (LhaArchiveInputStream archive = 
LhaArchiveInputStream.builder() + .setInputStream(newEmptyInputStream()) + .setFileSeparatorChar('/') + .get()) { + final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entryBuilder); assertEquals("dir1/", entryBuilder.get().getName()); @@ -1142,7 +1198,11 @@ void testParseExtendedHeaderDirectoryName() throws IOException { @Test void testParseExtendedHeaderFilenameAndDirectoryName() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '/')) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newEmptyInputStream()) + .setFileSeparatorChar('/') + .get()) { + LhaArchiveEntry.Builder entryBuilder; // Test filename and directory name order @@ -1173,7 +1233,7 @@ void testParseExtendedHeaderFilenameAndDirectoryName() throws IOException { @Test void testParseExtendedHeaderUnixPermission() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x50, 0xa4, 0x81, 0x00, 0x00), entryBuilder); assertEquals(0x81a4, entryBuilder.get().getUnixPermissionMode()); @@ -1183,7 +1243,7 @@ void testParseExtendedHeaderUnixPermission() throws IOException { @Test void testParseExtendedHeaderUnixUidGid() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x51, 0x14, 0x00, 
0xf5, 0x01, 0x00, 0x00), entryBuilder); assertEquals(0x0014, entryBuilder.get().getUnixGroupId()); @@ -1193,7 +1253,7 @@ void testParseExtendedHeaderUnixUidGid() throws IOException { @Test void testParseExtendedHeaderUnixTimestamp() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x54, 0x5c, 0x73, 0x9c, 0x68, 0x00, 0x00), entryBuilder); assertEquals(0x689c735cL, entryBuilder.get().getLastModifiedDate().getTime() / 1000); @@ -1202,7 +1262,7 @@ void testParseExtendedHeaderUnixTimestamp() throws IOException { @Test void testParseExtendedHeaderMSdosFileAttributes() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x40, 0x10, 0x00, 0x00), entryBuilder); assertEquals(0x10, entryBuilder.get().getMsdosFileAttributes()); @@ -1211,7 +1271,10 @@ void testParseExtendedHeaderMSdosFileAttributes() throws IOException { @Test void testDecompressLh0() throws Exception { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0.lha"), null, File.separatorChar)) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() + .setInputStream(newInputStream("test-macos-l0.lha")) + .get()) { + final List files = new ArrayList<>(); files.add("dir1" + File.separatorChar); files.add("dir1" + File.separatorChar + "dir1-1" + File.separatorChar); @@ -1224,7 +1287,7 @@ void testDecompressLh0() throws Exception { @Test void 
testDecompressLh4() throws Exception { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-amiga-l0-lh4.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-amiga-l0-lh4.lha")).get()) { final List files = new ArrayList<>(); files.add("lorem-ipsum.txt"); checkArchiveContent(archive, files); @@ -1233,7 +1296,7 @@ void testDecompressLh4() throws Exception { @Test void testDecompressLh5() throws Exception { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh5.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-macos-l0-lh5.lha")).get()) { final List files = new ArrayList<>(); files.add("lorem-ipsum.txt"); checkArchiveContent(archive, files); @@ -1246,7 +1309,7 @@ void testDecompressLh5() throws Exception { */ @Test void testDecompressLh5Rle() throws Exception { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh5-rle.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-macos-l0-lh5-rle.lha")).get()) { final List files = new ArrayList<>(); files.add("rle.txt"); checkArchiveContent(archive, files); @@ -1255,7 +1318,7 @@ void testDecompressLh5Rle() throws Exception { @Test void testDecompressLh6() throws Exception { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh6.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-macos-l0-lh6.lha")).get()) { final List files = new ArrayList<>(); files.add("lorem-ipsum.txt"); checkArchiveContent(archive, files); @@ -1264,7 +1327,7 @@ void testDecompressLh6() throws Exception { @Test void testDecompressLh7() throws Exception { - try (LhaArchiveInputStream archive = new 
LhaArchiveInputStream(newInputStream("test-macos-l0-lh7.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-macos-l0-lh7.lha")).get()) { final List files = new ArrayList<>(); files.add("lorem-ipsum.txt"); checkArchiveContent(archive, files); @@ -1309,7 +1372,7 @@ void testGetCompressionMethod() throws IOException { @Test void testGetPathnameUnixFileSeparatorCharDefaultEncoding() throws IOException, UnsupportedEncodingException { - try (LhaArchiveInputStream is = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '/')) { + try (LhaArchiveInputStream is = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).setFileSeparatorChar('/').get()) { assertEquals("folder/", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff)); assertEquals("folder/file.txt", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); assertEquals("folder/file.txt", getPathname(is, 0xff, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); @@ -1323,7 +1386,12 @@ void testGetPathnameUnixFileSeparatorCharDefaultEncoding() throws IOException, U @Test void testGetPathnameUnixFileSeparatorCharIso88591() throws IOException, UnsupportedEncodingException { - try (LhaArchiveInputStream is = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), "ISO-8859-1", '/')) { + try (LhaArchiveInputStream is = LhaArchiveInputStream.builder() + .setInputStream(newEmptyInputStream()) + .setCharset(StandardCharsets.ISO_8859_1) + .setFileSeparatorChar('/') + .get()) { + assertEquals("\u00E5/\u00E4/\u00F6.txt", getPathname(is, 0xe5, 0xff, 0xe4, 0xff, 0xf6, '.', 't', 'x', 't')); assertEquals("\u00E5/\u00E4/\u00F6.txt", getPathname(is, 0xe5, '\\', 0xe4, '\\', 0xf6, '.', 't', 'x', 't')); } @@ -1331,7 +1399,7 @@ void testGetPathnameUnixFileSeparatorCharIso88591() throws IOException, Unsuppor @Test void 
testGetPathnameWindowsFileSeparatorCharDefaultEncoding() throws IOException, UnsupportedEncodingException { - try (LhaArchiveInputStream is = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '\\')) { + try (LhaArchiveInputStream is = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).setFileSeparatorChar('\\').get()) { assertEquals("folder\\", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff)); assertEquals("folder\\file.txt", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); assertEquals("folder\\file.txt", getPathname(is, 0xff, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); @@ -1345,7 +1413,12 @@ void testGetPathnameWindowsFileSeparatorCharDefaultEncoding() throws IOException @Test void testGetPathnameWindowsFileSeparatorCharIso88591() throws IOException, UnsupportedEncodingException { - try (LhaArchiveInputStream is = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), "ISO-8859-1", '\\')) { + try (LhaArchiveInputStream is = LhaArchiveInputStream.builder() + .setInputStream(newEmptyInputStream()) + .setCharset(StandardCharsets.ISO_8859_1) + .setFileSeparatorChar('\\') + .get()) { + assertEquals("\u00E5\\\u00E4\\\u00F6.txt", getPathname(is, 0xe5, 0xff, 0xe4, 0xff, 0xf6, '.', 't', 'x', 't')); assertEquals("\u00E5\\\u00E4\\\u00F6.txt", getPathname(is, 0xe5, '\\', 0xe4, '\\', 0xf6, '.', 't', 'x', 't')); } @@ -1367,6 +1440,10 @@ private String getPathname(final LhaArchiveInputStream is, final int... filepath return is.getPathname(ByteBuffer.wrap(toByteArray(filepathBuffer)), filepathBuffer.length); } + private InputStream newEmptyInputStream() { + return new ByteArrayInputStream(new byte[0]); + } + /** * The timestamp used in header level 0 and 1 entries has no time zone information and is * converted in the system default time zone. 
This method converts the date to UTC to verify diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java index f336a6f2c1a..633b33d6c30 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java @@ -49,7 +49,7 @@ void testConfiguration() throws IOException { @Test void testDecompress() throws IOException { // This file was created using LhA 1.38 on Amiga - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-amiga-l0-lh4.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-amiga-l0-lh4.lha")).get()) { // Check entry final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java index 64413a72b9b..52d314ffedb 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java @@ -48,7 +48,7 @@ void testConfiguration() throws IOException { @Test void testDecompress() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh5.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-macos-l0-lh5.lha")).get()) { // Check entry final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java 
b/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java index e359b30b978..a9645f7055f 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java @@ -48,7 +48,7 @@ void testConfiguration() throws IOException { @Test void testDecompress() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh6.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream .builder().setInputStream(newInputStream("test-macos-l0-lh6.lha")).get()) { // Check entry final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java index a54e16a1fc6..47e767ba879 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java @@ -48,7 +48,7 @@ void testConfiguration() throws IOException { @Test void testDecompress() throws IOException { - try (LhaArchiveInputStream archive = new LhaArchiveInputStream(newInputStream("test-macos-l0-lh7.lha"))) { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-macos-l0-lh7.lha")).get()) { // Check entry final LhaArchiveEntry entry = archive.getNextEntry(); assertNotNull(entry); From 3930c9c48b8764aea46b0506ba6de867f931441e Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 6 Sep 2025 15:57:08 +0200 Subject: [PATCH 16/22] Check maximum pathname length --- .../archivers/lha/LhaArchiveInputStream.java | 13 ++++++++++++- .../archivers/lha/LhaArchiveInputStreamTest.java | 15 ++++++++++++++- 2 files changed, 
26 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index 453a78b35ee..ce6ff4cde6a 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -194,6 +194,11 @@ Builder setFileSeparatorChar(final char fileSeparatorChar) { private static final String COMPRESSION_METHOD_LH7 = "-lh7-"; private static final String COMPRESSION_METHOD_LZ4 = "-lz4-"; + /** + * Maximum length of a pathname. + */ + private static final int MAX_PATHNAME_LENGTH = 4096; + private final char fileSeparatorChar; private LhaArchiveEntry currentEntry; private InputStream currentCompressedStream; @@ -517,8 +522,14 @@ static String getCompressionMethod(final ByteBuffer buffer) throws ArchiveExcept * @param buffer the buffer where to get the pathname from * @param pathnameLength the length of the pathname * @return pathname + * @throws ArchiveException if the pathname is too long */ - String getPathname(final ByteBuffer buffer, final int pathnameLength) { + String getPathname(final ByteBuffer buffer, final int pathnameLength) throws ArchiveException { + // Check pathname length to ensure we don't allocate too much memory + if (pathnameLength > MAX_PATHNAME_LENGTH) { + throw new ArchiveException("Pathname is longer than the maximum allowed (%d > %d)", pathnameLength, MAX_PATHNAME_LENGTH); + } + final byte[] pathnameBuffer = new byte[pathnameLength]; buffer.get(pathnameBuffer); diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java index 8170df5ea83..8ce354995ef 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java +++ 
b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -1411,6 +1411,19 @@ void testGetPathnameWindowsFileSeparatorCharDefaultEncoding() throws IOException } } + @Test + void testGetPathnameTooLong() throws IOException, UnsupportedEncodingException { + try (LhaArchiveInputStream is = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + try { + final byte[] pathname = new byte[4097]; + is.getPathname(ByteBuffer.wrap(pathname), pathname.length); + fail("Expected ArchiveException when pathname is longer than the maximum allowed"); + } catch (ArchiveException e) { + assertEquals("Pathname is longer than the maximum allowed (4097 > 4096)", e.getMessage()); + } + } + } + @Test void testGetPathnameWindowsFileSeparatorCharIso88591() throws IOException, UnsupportedEncodingException { try (LhaArchiveInputStream is = LhaArchiveInputStream.builder() @@ -1436,7 +1449,7 @@ private static ByteBuffer toByteBuffer(final int... data) { return ByteBuffer.wrap(toByteArray(data)).order(ByteOrder.LITTLE_ENDIAN); } - private String getPathname(final LhaArchiveInputStream is, final int... filepathBuffer) throws UnsupportedEncodingException { + private String getPathname(final LhaArchiveInputStream is, final int... 
filepathBuffer) throws ArchiveException, UnsupportedEncodingException { return is.getPathname(ByteBuffer.wrap(toByteArray(filepathBuffer)), filepathBuffer.length); } From 673b2ab9fab18f4d5c52788cd29c544b3b9f7093 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 6 Sep 2025 18:46:57 +0200 Subject: [PATCH 17/22] Add better length checks for pathnames and extended headers --- .../archivers/lha/LhaArchiveInputStream.java | 51 +++++++- .../lha/LhaArchiveInputStreamTest.java | 112 +++++++++++++++++- 2 files changed, 159 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index ce6ff4cde6a..e8eb3879adb 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -185,6 +185,22 @@ Builder setFileSeparatorChar(final char fileSeparatorChar) { private static final int EXTENDED_HEADER_TYPE_UNIX_UID_GID = 0x51; private static final int EXTENDED_HEADER_TYPE_UNIX_TIMESTAMP = 0x54; + /** + * Length in bytes of the next extended header size field. + */ + private static final int EXTENDED_HEADER_NEXT_HEADER_SIZE_LENGTH = 2; + + /** + * Minimum extended header length. 
+ */ + private static final int MIN_EXTENDED_HEADER_LENGTH = 3; + + private static final int EXTENDED_HEADER_TYPE_COMMON_MIN_PAYLOAD_LENGTH = 2; + private static final int EXTENDED_HEADER_TYPE_MSDOS_FILE_ATTRIBUTES_PAYLOAD_LENGTH = 2; + private static final int EXTENDED_HEADER_TYPE_UNIX_PERMISSION_PAYLOAD_LENGTH = 2; + private static final int EXTENDED_HEADER_TYPE_UNIX_UID_GID_PAYLOAD_LENGTH = 4; + private static final int EXTENDED_HEADER_TYPE_UNIX_TIMESTAMP_PAYLOAD_LENGTH = 4; + // Compression methods private static final String COMPRESSION_METHOD_DIRECTORY = "-lhd-"; // Directory entry private static final String COMPRESSION_METHOD_LH0 = "-lh0-"; @@ -528,6 +544,10 @@ String getPathname(final ByteBuffer buffer, final int pathnameLength) throws Arc // Check pathname length to ensure we don't allocate too much memory if (pathnameLength > MAX_PATHNAME_LENGTH) { throw new ArchiveException("Pathname is longer than the maximum allowed (%d > %d)", pathnameLength, MAX_PATHNAME_LENGTH); + } else if (pathnameLength < 0) { + throw new ArchiveException("Pathname length is negative"); + } else if (pathnameLength > (buffer.limit() - buffer.position())) { + throw new ArchiveException("Invalid pathname length"); } final byte[] pathnameBuffer = new byte[pathnameLength]; @@ -610,9 +630,18 @@ private ByteBuffer readExtendedHeader(final int headerSize) throws IOException { * @throws IOException */ void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final LhaArchiveEntry.Builder entryBuilder) throws IOException { + final int extendedHeaderLength = extendedHeaderBuffer.limit() - extendedHeaderBuffer.position(); + if (extendedHeaderLength < MIN_EXTENDED_HEADER_LENGTH) { + throw new ArchiveException("Invalid extended header length"); + } + final int extendedHeaderType = Byte.toUnsignedInt(extendedHeaderBuffer.get()); if (extendedHeaderType == EXTENDED_HEADER_TYPE_COMMON) { // Common header + if (extendedHeaderLength < (MIN_EXTENDED_HEADER_LENGTH + 
EXTENDED_HEADER_TYPE_COMMON_MIN_PAYLOAD_LENGTH)) { + throw new ArchiveException("Invalid extended header length"); + } + final int crcPos = extendedHeaderBuffer.position(); // Save the current position to be able to set the header CRC later // Header CRC @@ -622,14 +651,14 @@ void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final LhaArchive extendedHeaderBuffer.putShort(crcPos, (short) 0); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_FILENAME) { // File name header - final int filenameLength = extendedHeaderBuffer.limit() - extendedHeaderBuffer.position() - 2; + final int filenameLength = extendedHeaderBuffer.limit() - extendedHeaderBuffer.position() - EXTENDED_HEADER_NEXT_HEADER_SIZE_LENGTH; final String filename = getPathname(extendedHeaderBuffer, filenameLength); entryBuilder.setFilename(filename); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_DIRECTORY_NAME) { // Directory name header - final int directoryNameLength = extendedHeaderBuffer.limit() - extendedHeaderBuffer.position() - 2; + final int directoryNameLength = extendedHeaderBuffer.limit() - extendedHeaderBuffer.position() - EXTENDED_HEADER_NEXT_HEADER_SIZE_LENGTH; final String directoryName = getPathname(extendedHeaderBuffer, directoryNameLength); - if (directoryName.charAt(directoryName.length() - 1) != fileSeparatorChar) { + if (directoryName.length() > 0 && directoryName.charAt(directoryName.length() - 1) != fileSeparatorChar) { // If the directory name does not end with a file separator, append it entryBuilder.setDirectoryName(directoryName + fileSeparatorChar); } else { @@ -638,16 +667,32 @@ void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final LhaArchive } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_MSDOS_FILE_ATTRIBUTES) { // MS-DOS file attributes + if (extendedHeaderLength != (MIN_EXTENDED_HEADER_LENGTH + EXTENDED_HEADER_TYPE_MSDOS_FILE_ATTRIBUTES_PAYLOAD_LENGTH)) { + throw new ArchiveException("Invalid extended header length"); + } + 
entryBuilder.setMsdosFileAttributes(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_PERMISSION) { // UNIX file permission + if (extendedHeaderLength != (MIN_EXTENDED_HEADER_LENGTH + EXTENDED_HEADER_TYPE_UNIX_PERMISSION_PAYLOAD_LENGTH)) { + throw new ArchiveException("Invalid extended header length"); + } + entryBuilder.setUnixPermissionMode(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_UID_GID) { // UNIX group/user ID + if (extendedHeaderLength != (MIN_EXTENDED_HEADER_LENGTH + EXTENDED_HEADER_TYPE_UNIX_UID_GID_PAYLOAD_LENGTH)) { + throw new ArchiveException("Invalid extended header length"); + } + entryBuilder.setUnixGroupId(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); entryBuilder.setUnixUserId(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_TIMESTAMP) { // UNIX last modified time + if (extendedHeaderLength != (MIN_EXTENDED_HEADER_LENGTH + EXTENDED_HEADER_TYPE_UNIX_TIMESTAMP_PAYLOAD_LENGTH)) { + throw new ArchiveException("Invalid extended header length"); + } + entryBuilder.setLastModifiedDate(new Date(Integer.toUnsignedLong(extendedHeaderBuffer.getInt()) * 1000)); } diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java index 8ce354995ef..94dd3a4bc41 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -1165,21 +1165,52 @@ void testParseHeaderLevel2FileWithMsdosAttributes() throws IOException { } } + @Test + void testParseExtendedHeaderTooShort() throws IOException { + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + 
final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); + try { + archive.parseExtendedHeader(toByteBuffer(0x00, 0x00), entryBuilder); + fail("Expected ArchiveException for invalid extended header length"); + } catch (ArchiveException e) { + assertEquals("Invalid extended header length", e.getMessage()); + } + } + } + @Test void testParseExtendedHeaderCommon() throws IOException { try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + // Valid final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x00, 0x22, 0x33, 0x00, 0x00), entryBuilder); assertEquals(0x3322, entryBuilder.get().getHeaderCrc()); + + // Invalid length + try { + archive.parseExtendedHeader(toByteBuffer(0x00, 0x22, 0x00, 0x00), entryBuilder); + fail("Expected ArchiveException for invalid extended header length"); + } catch (ArchiveException e) { + assertEquals("Invalid extended header length", e.getMessage()); + } } } @Test void testParseExtendedHeaderFilename() throws IOException { try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + // Valid final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); assertEquals("test.txt", entryBuilder.get().getName()); + + // Invalid length + try { + archive.parseExtendedHeader(toByteBuffer(0x01, 0x00), entryBuilder); + fail("Expected ArchiveException for invalid extended header length"); + } catch (ArchiveException e) { + assertEquals("Invalid extended header length", e.getMessage()); + } } } @@ -1190,9 +1221,18 @@ void testParseExtendedHeaderDirectoryName() throws IOException { .setFileSeparatorChar('/') .get()) { + // Valid final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); 
archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entryBuilder); assertEquals("dir1/", entryBuilder.get().getName()); + + // Invalid length + try { + archive.parseExtendedHeader(toByteBuffer(0x02, 0x00), entryBuilder); + fail("Expected ArchiveException for invalid extended header length"); + } catch (ArchiveException e) { + assertEquals("Invalid extended header length", e.getMessage()); + } } } @@ -1228,44 +1268,86 @@ void testParseExtendedHeaderFilenameAndDirectoryName() throws IOException { archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0x00, 0x00), entryBuilder); archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); assertEquals("dir1/test.txt", entryBuilder.get().getName()); + + // Test empty directory name, no trailing slash + entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x02, 0x00, 0x00), entryBuilder); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); + assertEquals("test.txt", entryBuilder.get().getName()); } } @Test void testParseExtendedHeaderUnixPermission() throws IOException { try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + // Valid final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x50, 0xa4, 0x81, 0x00, 0x00), entryBuilder); assertEquals(0x81a4, entryBuilder.get().getUnixPermissionMode()); assertEquals(0100644, entryBuilder.get().getUnixPermissionMode()); + + // Invalid length + try { + archive.parseExtendedHeader(toByteBuffer(0x50, 0xa4, 0x00, 0x00), entryBuilder); + fail("Expected ArchiveException for invalid extended header length"); + } catch (ArchiveException e) { + assertEquals("Invalid extended header length", e.getMessage()); + } } } @Test void testParseExtendedHeaderUnixUidGid() throws 
IOException { try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + // Valid final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x51, 0x14, 0x00, 0xf5, 0x01, 0x00, 0x00), entryBuilder); assertEquals(0x0014, entryBuilder.get().getUnixGroupId()); assertEquals(0x01f5, entryBuilder.get().getUnixUserId()); + + // Invalid length + try { + archive.parseExtendedHeader(toByteBuffer(0x51, 0x14, 0x00, 0xf5, 0x00, 0x00), entryBuilder); + fail("Expected ArchiveException for invalid extended header length"); + } catch (ArchiveException e) { + assertEquals("Invalid extended header length", e.getMessage()); + } } } @Test void testParseExtendedHeaderUnixTimestamp() throws IOException { try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + // Valid final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); archive.parseExtendedHeader(toByteBuffer(0x54, 0x5c, 0x73, 0x9c, 0x68, 0x00, 0x00), entryBuilder); assertEquals(0x689c735cL, entryBuilder.get().getLastModifiedDate().getTime() / 1000); + + // Invalid length + try { + archive.parseExtendedHeader(toByteBuffer(0x54, 0x5c, 0x73, 0x9c, 0x00, 0x00), entryBuilder); + fail("Expected ArchiveException for invalid extended header length"); + } catch (ArchiveException e) { + assertEquals("Invalid extended header length", e.getMessage()); + } } } @Test void testParseExtendedHeaderMSdosFileAttributes() throws IOException { try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + // Valid final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); - archive.parseExtendedHeader(toByteBuffer(0x40, 0x10, 0x00, 0x00), entryBuilder); + archive.parseExtendedHeader(toByteBuffer(0x40, 0x10, 0x00, 0x00, 0x00), entryBuilder); assertEquals(0x10, 
entryBuilder.get().getMsdosFileAttributes()); + + // Invalid length + try { + archive.parseExtendedHeader(toByteBuffer(0x40, 0x10, 0x00, 0x00), entryBuilder); + fail("Expected ArchiveException for invalid extended header length"); + } catch (ArchiveException e) { + assertEquals("Invalid extended header length", e.getMessage()); + } } } @@ -1373,6 +1455,9 @@ void testGetCompressionMethod() throws IOException { @Test void testGetPathnameUnixFileSeparatorCharDefaultEncoding() throws IOException, UnsupportedEncodingException { try (LhaArchiveInputStream is = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).setFileSeparatorChar('/').get()) { + assertEquals("", getPathname(is)); + assertEquals("", getPathname(is, 0xff)); + assertEquals("a", getPathname(is, 'a')); assertEquals("folder/", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff)); assertEquals("folder/file.txt", getPathname(is, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); assertEquals("folder/file.txt", getPathname(is, 0xff, 'f', 'o', 'l', 'd', 'e', 'r', 0xff, 'f', 'i', 'l', 'e', '.', 't', 'x', 't')); @@ -1411,6 +1496,18 @@ void testGetPathnameWindowsFileSeparatorCharDefaultEncoding() throws IOException } } + @Test + void testGetPathnameNegativeLength() throws IOException, UnsupportedEncodingException { + try (LhaArchiveInputStream is = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + try { + is.getPathname(ByteBuffer.wrap(new byte[0]), -1); + fail("Expected ArchiveException when pathname length is negative"); + } catch (ArchiveException e) { + assertEquals("Pathname length is negative", e.getMessage()); + } + } + } + @Test void testGetPathnameTooLong() throws IOException, UnsupportedEncodingException { try (LhaArchiveInputStream is = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { @@ -1424,6 +1521,19 @@ void testGetPathnameTooLong() throws IOException, UnsupportedEncodingException { } } + @Test + 
void testGetPathnameInvalidLength() throws IOException, UnsupportedEncodingException { + try (LhaArchiveInputStream is = LhaArchiveInputStream.builder().setInputStream(newEmptyInputStream()).get()) { + try { + final byte[] pathname = new byte[] { 'a', 'b', 'c' }; + is.getPathname(ByteBuffer.wrap(pathname), pathname.length + 1); + fail("Expected ArchiveException for invalid pathname length"); + } catch (ArchiveException e) { + assertEquals("Invalid pathname length", e.getMessage()); + } + } + } + @Test void testGetPathnameWindowsFileSeparatorCharIso88591() throws IOException, UnsupportedEncodingException { try (LhaArchiveInputStream is = LhaArchiveInputStream.builder() From be76464fc1eccf76d1e98131c5266f1e64ebbde8 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 6 Sep 2025 19:23:46 +0200 Subject: [PATCH 18/22] Check for too many or no leaf nodes --- .../compress/compressors/lha/BinaryTree.java | 15 ++++++++--- .../compressors/lha/BinaryTreeTest.java | 26 ++++++++++++++++--- 2 files changed, 35 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java b/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java index 680e198dd9a..416b6b853dc 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java @@ -54,8 +54,9 @@ class BinaryTree { * be read from the bit stream when the read method is called, as there are no children to traverse. * * @param array the array to build the binary tree from + * @throws CompressorException if the tree is invalid */ - BinaryTree(final int... array) { + BinaryTree(final int... 
array) throws CompressorException { if (array.length == 1) { // Tree only contains a single value, which is the root node value this.tree = new int[] { array[0] }; @@ -64,6 +65,9 @@ class BinaryTree { // Determine the maximum depth of the tree from the input array final int maxDepth = Arrays.stream(array).max().getAsInt(); + if (maxDepth == 0) { + throw new CompressorException("Tree contains no leaf nodes"); + } // Allocate binary tree with enough space for all nodes this.tree = initTree(maxDepth); @@ -82,6 +86,10 @@ class BinaryTree { // Add leaf nodes for values with the current depth for (int value = 0; value < array.length; value++) { if (array[value] == currentDepth) { + if (numNodesAtCurrentDepth == maxNodesAtCurrentDepth) { + throw new CompressorException("Tree contains too many leaf nodes for depth %d", currentDepth); + } + this.tree[treePos++] = value; // Add leaf (value) node numNodesAtCurrentDepth++; } @@ -109,10 +117,11 @@ class BinaryTree { * * @param depth the depth of the tree, must be between 0 and 16 (inclusive) * @return an array representing the binary tree, initialized with UNDEFINED values + * @throws CompressorException for invalid depth */ - private int[] initTree(final int depth) { + private int[] initTree(final int depth) throws CompressorException { if (depth < 0 || depth > 16) { - throw new IllegalArgumentException("Depth must not be negative and not bigger than 16 but is " + depth); + throw new CompressorException("Tree depth must not be negative and not bigger than 16 but is " + depth); } final int arraySize = depth == 0 ? 
1 : (int) ((1L << depth + 1) - 1); // Depth 0 has only a single node (the root) diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java index 81474bfa1b5..18ab1135f32 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java @@ -228,9 +228,29 @@ void testInvalidBitstream() throws Exception { void testCheckMaxDepth() throws Exception { try { new BinaryTree(1, 17); - fail("Expected IllegalArgumentException for depth > 16"); - } catch (IllegalArgumentException e) { - assertEquals("Depth must not be negative and not bigger than 16 but is 17", e.getMessage()); + fail("Expected CompressorException for depth > 16"); + } catch (CompressorException e) { + assertEquals("Tree depth must not be negative and not bigger than 16 but is 17", e.getMessage()); + } + } + + @Test + void testTooManyLeafNodes() throws Exception { + try { + new BinaryTree(0, 2, 1, 2, 2); + fail("Expected CompressorException for too many leaf nodes"); + } catch (CompressorException e) { + assertEquals("Tree contains too many leaf nodes for depth 2", e.getMessage()); + } + } + + @Test + void testNoLeafNodes() throws Exception { + try { + new BinaryTree(0, 0, 0, 0, 0); + fail("Expected CompressorException for no leaf nodes"); + } catch (CompressorException e) { + assertEquals("Tree contains no leaf nodes", e.getMessage()); } } From 91969bf7e130f52515be58456a9ece54a86f2c6a Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sun, 7 Sep 2025 08:16:42 +0200 Subject: [PATCH 19/22] Better handling of corrupt archives --- .../archivers/lha/LhaArchiveInputStream.java | 51 +++++++++++++------ .../lha/LhaArchiveInputStreamTest.java | 32 +++++++++++- 2 files changed, 66 insertions(+), 17 deletions(-) diff --git 
a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index e8eb3879adb..1ac01e8175e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -343,12 +343,13 @@ LhaArchiveEntry readHeader() throws IOException { * @throws IOException */ LhaArchiveEntry readHeaderLevel0(ByteBuffer buffer) throws IOException { - final int headerSize = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_HEADER_SIZE)); + // Add two to the header size as the first two bytes are not included + final int headerSize = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_HEADER_SIZE)) + 2; if (headerSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { throw new ArchiveException("Invalid header level 0 length: %d", headerSize); } - buffer = readRemainingHeaderData(buffer, headerSize + 2); // Header size is not including the first two bytes of the header + buffer = readRemainingHeaderData(buffer, headerSize); final int headerChecksum = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_HEADER_CHECKSUM)); @@ -361,6 +362,12 @@ LhaArchiveEntry readHeaderLevel0(ByteBuffer buffer) throws IOException { .setLastModifiedDate(new Date(ZipUtil.dosToJavaTime(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_LAST_MODIFIED_DATE_TIME))))); final int filenameLength = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_FILENAME_LENGTH)); + + // Make sure the filename is not overflowing into the CRC field + if (filenameLength > (headerSize - HEADER_LEVEL_0_OFFSET_FILENAME - 2)) { + throw new ArchiveException("Invalid pathname length"); + } + buffer.position(HEADER_LEVEL_0_OFFSET_FILENAME); entryBuilder.setFilename(getPathname(buffer, filenameLength)) .setDirectory(isDirectory(compressionMethod)) @@ -385,12 +392,13 @@ LhaArchiveEntry readHeaderLevel0(ByteBuffer 
buffer) throws IOException { * @throws IOException */ LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException { - final int baseHeaderSize = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_BASE_HEADER_SIZE)); + // Add two to the header size as the first two bytes are not included + final int baseHeaderSize = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_BASE_HEADER_SIZE)) + 2; if (baseHeaderSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { throw new ArchiveException("Invalid header level 1 length: %d", baseHeaderSize); } - buffer = readRemainingHeaderData(buffer, baseHeaderSize + 2); + buffer = readRemainingHeaderData(buffer, baseHeaderSize); final int baseHeaderChecksum = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_BASE_HEADER_CHECKSUM)); @@ -403,6 +411,14 @@ LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException { .setLastModifiedDate(new Date(ZipUtil.dosToJavaTime(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_LAST_MODIFIED_DATE_TIME))))); final int filenameLength = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_FILENAME_LENGTH)); + + // Make sure the filename is not overflowing into the CRC, OS ID and first extended header length fields. + // This check is not bulletproof because there might also be an extended area after the filename that + // we cannot detect for corrupt archives. 
+ if (filenameLength > (baseHeaderSize - HEADER_LEVEL_1_OFFSET_FILENAME - 5)) { + throw new ArchiveException("Invalid pathname length"); + } + buffer.position(HEADER_LEVEL_1_OFFSET_FILENAME); entryBuilder.setFilename(getPathname(buffer, filenameLength)) .setDirectory(isDirectory(compressionMethod)) @@ -418,16 +434,17 @@ LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException { final List headerParts = new ArrayList<>(); headerParts.add(buffer); - int nextHeaderSize = Short.toUnsignedInt(buffer.getShort()); - while (nextHeaderSize > 0) { - final ByteBuffer extendedHeaderBuffer = readExtendedHeader(nextHeaderSize); - skipSize -= nextHeaderSize; + buffer.position(baseHeaderSize - 2); // First extended header length is at the end of the base header + int extendedHeaderSize = Short.toUnsignedInt(buffer.getShort()); + while (extendedHeaderSize > 0) { + final ByteBuffer extendedHeaderBuffer = readExtendedHeader(extendedHeaderSize); + skipSize -= extendedHeaderSize; parseExtendedHeader(extendedHeaderBuffer, entryBuilder); headerParts.add(extendedHeaderBuffer); - nextHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); + extendedHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); } entryBuilder.setCompressedSize(skipSize); @@ -473,17 +490,21 @@ LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException { .setCrcValue(Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_CRC))) .setOsId(Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_2_OFFSET_OS_ID))); - int extendedHeaderOffset = HEADER_LEVEL_2_OFFSET_FIRST_EXTENDED_HEADER_SIZE; - int nextHeaderSize = Short.toUnsignedInt(buffer.getShort(extendedHeaderOffset)); - while (nextHeaderSize > 0) { + int extendedHeaderSize = Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_FIRST_EXTENDED_HEADER_SIZE)); + int extendedHeaderOffset = HEADER_LEVEL_2_OFFSET_FIRST_EXTENDED_HEADER_SIZE + 2; + while 
(extendedHeaderSize > 0) { + if ((extendedHeaderOffset + extendedHeaderSize) > buffer.limit()) { + throw new ArchiveException("Invalid extended header length"); + } + // Create new ByteBuffer as a slice from the full header. Set limit to the extended header length. - final ByteBuffer extendedHeaderBuffer = byteBufferSlice(buffer, extendedHeaderOffset + 2, nextHeaderSize).order(ByteOrder.LITTLE_ENDIAN); + final ByteBuffer extendedHeaderBuffer = byteBufferSlice(buffer, extendedHeaderOffset, extendedHeaderSize).order(ByteOrder.LITTLE_ENDIAN); - extendedHeaderOffset += nextHeaderSize; + extendedHeaderOffset += extendedHeaderSize; parseExtendedHeader(extendedHeaderBuffer, entryBuilder); - nextHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); + extendedHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); } final LhaArchiveEntry entry = entryBuilder.get(); diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java index 94dd3a4bc41..9f5b1a3537d 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -254,7 +254,7 @@ void testInvalidHeaderLevel0Length() throws IOException { archive.getNextEntry(); fail("Expected ArchiveException for invalid header length"); } catch (ArchiveException e) { - assertEquals("Invalid header level 0 length: 16", e.getMessage()); + assertEquals("Invalid header level 0 length: 18", e.getMessage()); } } @@ -272,6 +272,20 @@ void testInvalidHeaderLevel0Checksum() throws IOException { } } + @Test + void testInvalidHeaderLevel0FilenameLength() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_0_FILE); + + data[21] = 22; // Change the length of the filename + + try 
(LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid filename"); + } catch (ArchiveException e) { + assertEquals("Invalid pathname length", e.getMessage()); + } + } + @Test void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { // The lha file was generated by LHa for UNIX version 1.14i-ac20211125 for Macos @@ -571,7 +585,7 @@ void testInvalidHeaderLevel1Length() throws IOException { archive.getNextEntry(); fail("Expected ArchiveException for invalid header length"); } catch (ArchiveException e) { - assertEquals("Invalid header level 1 length: 16", e.getMessage()); + assertEquals("Invalid header level 1 length: 18", e.getMessage()); } } @@ -605,6 +619,20 @@ void testInvalidHeaderLevel1Crc() throws IOException { } } + @Test + void testInvalidHeaderLevel1FilenameLength() throws IOException { + final byte[] data = toByteArray(VALID_HEADER_LEVEL_1_FILE); + + data[21] = 10; // Change the length of the filename + + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { + archive.getNextEntry(); + fail("Expected ArchiveException for invalid filename"); + } catch (ArchiveException e) { + assertEquals("Invalid pathname length", e.getMessage()); + } + } + @Test void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { // The lha file was generated by LHa for UNIX version 1.14i-ac20211125 for Macos From 1d17f23ff833eea7d46c96673cd94f307e552922 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sun, 7 Sep 2025 15:30:57 +0200 Subject: [PATCH 20/22] Add test case for LArc lz4 compression method --- .../lha/LhaArchiveInputStreamTest.java | 37 ++++++++++++++++++ src/test/resources/test-msdos-l0-lz4.lzs | Bin 0 -> 48 bytes 2 files changed, 37 insertions(+) create mode 100644 src/test/resources/test-msdos-l0-lz4.lzs diff --git 
a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java index 9f5b1a3537d..67e35919fed 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -495,6 +495,33 @@ void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { } } + @Test + void testParseHeaderLevel0Larc() throws IOException { + // This archive was created using LArc 3.33 on MS-DOS + try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-msdos-l0-lz4.lzs")).get()) { + // Check file entry + final LhaArchiveEntry entry = archive.getNextEntry(); + assertNotNull(entry); + assertEquals("TEST1.TXT", entry.getName()); + assertFalse(entry.isDirectory()); + assertEquals(14, entry.getSize()); + assertEquals(1757247072000L, convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate()).toInstant().toEpochMilli()); + assertEquals(ZonedDateTime.parse("2025-09-07T12:11:12Z"), convertSystemTimeZoneDateToUTC(entry.getLastModifiedDate())); + assertEquals(14, entry.getCompressedSize()); + assertEquals("-lz4-", entry.getCompressionMethod()); + assertEquals(0xc9b4, entry.getCrcValue()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); + + // No more entries expected + assertNull(archive.getNextEntry()); + } + } + @Test void testParseHeaderLevel1File() throws IOException { try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder() @@ -1444,6 +1471,16 @@ void testDecompressLh7() throws Exception { } } + @Test + void testDecompressLz4() throws Exception { + // This archive was created using LArc 3.33 on MS-DOS + try 
(LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(newInputStream("test-msdos-l0-lz4.lzs")).get()) { + final List files = new ArrayList<>(); + files.add("TEST1.TXT"); + checkArchiveContent(archive, files); + } + } + @Test void testMatches() { byte[] data; diff --git a/src/test/resources/test-msdos-l0-lz4.lzs b/src/test/resources/test-msdos-l0-lz4.lzs new file mode 100644 index 0000000000000000000000000000000000000000..9eab0921ff3eda6ce0430c77ce5993ddb1c7aed8 GIT binary patch literal 48 zcmb1_)6J Date: Tue, 16 Sep 2025 20:42:09 +0200 Subject: [PATCH 21/22] Better handling of corrupt archives --- .../commons/compress/archivers/lha/LhaArchiveInputStream.java | 3 ++- .../compress/archivers/lha/LhaArchiveInputStreamTest.java | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index 1ac01e8175e..f8f451f9f3e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -166,6 +166,7 @@ Builder setFileSeparatorChar(final char fileSeparatorChar) { private static final int HEADER_LEVEL_1_OFFSET_FILENAME = 22; // Header Level 2 + private static final int HEADER_LEVEL_2_MINIMUM_HEADER_LENGTH = 26; private static final int HEADER_LEVEL_2_OFFSET_HEADER_SIZE = 0; private static final int HEADER_LEVEL_2_OFFSET_COMPRESSED_SIZE = 7; private static final int HEADER_LEVEL_2_OFFSET_ORIGINAL_SIZE = 11; @@ -473,7 +474,7 @@ LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException { */ LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException { final int headerSize = Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_HEADER_SIZE)); - if (headerSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { + if (headerSize < 
HEADER_LEVEL_2_MINIMUM_HEADER_LENGTH) { throw new ArchiveException("Invalid header level 2 length: %d", headerSize); } diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java index 67e35919fed..bd1176ea3f7 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -904,13 +904,13 @@ void testParseHeaderLevel2File() throws IOException { void testInvalidHeaderLevel2Length() throws IOException { final byte[] data = toByteArray(VALID_HEADER_LEVEL_2_FILE); - data[0] = 0x10; // Change the first byte to an invalid length + data[0] = 25; // Change the first byte to an invalid length try (LhaArchiveInputStream archive = LhaArchiveInputStream.builder().setInputStream(new ByteArrayInputStream(data)).get()) { archive.getNextEntry(); fail("Expected ArchiveException for invalid header length"); } catch (ArchiveException e) { - assertEquals("Invalid header level 2 length: 16", e.getMessage()); + assertEquals("Invalid header level 2 length: 25", e.getMessage()); } } From 067d583a2076336f87212d8210b5099890b87378 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 20 Sep 2025 15:49:09 +0200 Subject: [PATCH 22/22] Align with new ArchiveInputStream builder classes --- .../archivers/ArchiveStreamFactory.java | 6 +- .../archivers/lha/LhaArchiveInputStream.java | 69 +++++-------------- .../archivers/ArchiveStreamFactoryTest.java | 10 ++- 3 files changed, 29 insertions(+), 56 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index 282ff006336..94c8fbeab0b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -46,7 +46,6 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.Sets; -import org.apache.commons.io.Charsets; import org.apache.commons.lang3.StringUtils; /** @@ -463,10 +462,11 @@ public > I createArchiveInp return (I) arjBuilder.get(); } if (LHA.equalsIgnoreCase(archiverName)) { + final LhaArchiveInputStream.Builder lhaBuilder = LhaArchiveInputStream.builder().setInputStream(in); if (actualEncoding != null) { - return (I) LhaArchiveInputStream.builder().setInputStream(in).setCharset(Charsets.toCharset(actualEncoding)).get(); + lhaBuilder.setCharset(actualEncoding); } - return (I) LhaArchiveInputStream.builder().setInputStream(in).get(); + return (I) lhaBuilder.get(); } if (ZIP.equalsIgnoreCase(archiverName)) { final ZipArchiveInputStream.Builder zipBuilder = ZipArchiveInputStream.builder().setInputStream(in); diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index f8f451f9f3e..66564928213 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -39,7 +39,6 @@ import org.apache.commons.compress.compressors.lha.Lh5CompressorInputStream; import org.apache.commons.compress.compressors.lha.Lh6CompressorInputStream; import org.apache.commons.compress.compressors.lha.Lh7CompressorInputStream; -import org.apache.commons.io.Charsets; import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.io.input.ChecksumInputStream; @@ -55,37 +54,19 @@ */ public class LhaArchiveInputStream extends ArchiveInputStream { - // @formatter:off /** * Builds a new {@link 
LhaArchiveInputStream}. - * *

* For example: *

*
{@code
-     * LhaArchiveInputStream s = LhaArchiveInputStream.builder()
-     *   .setInputStream(archiveInputStream)
+     * LhaArchiveInputStream in = LhaArchiveInputStream.builder()
+     *   .setPath(inputPath)
      *   .setCharset(StandardCharsets.UTF_8)
-     *   .get();}
-     * 
+ * .get(); + * } */ - // @formatter:on - public static class Builder { - - /** - * The InputStream to read the archive data from. - */ - private InputStream inputStream; - - /** - * The default Charset. - */ - private Charset charsetDefault = StandardCharsets.US_ASCII; - - /** - * The Charset, defaults to {@link StandardCharsets#US_ASCII}. - */ - private Charset charset = charsetDefault; + public static class Builder extends AbstractBuilder { /** * The file separator char, defaults to {@link File#separatorChar}. @@ -96,7 +77,7 @@ public static class Builder { * Constructs a new instance. */ private Builder() { - // empty + setCharset(DEFAULT_CHARSET); } /** @@ -104,30 +85,9 @@ private Builder() { * * @return a new LhaArchiveInputStream. */ - public LhaArchiveInputStream get() { - return new LhaArchiveInputStream(this.inputStream, this.charset, this.fileSeparatorChar); - } - - /** - * Sets the InputStream to read the archive data from. - * - * @param inputStream the InputStream. - * @return {@code this} instance. - */ - public Builder setInputStream(final InputStream inputStream) { - this.inputStream = inputStream; - return this; - } - - /** - * Sets the Charset. - * - * @param charset the Charset, null resets to the default {@link StandardCharsets#US_ASCII}. - * @return {@code this} instance. - */ - public Builder setCharset(final Charset charset) { - this.charset = Charsets.toCharset(charset, charsetDefault); - return this; + @Override + public LhaArchiveInputStream get() throws IOException { + return new LhaArchiveInputStream(this); } /** @@ -216,6 +176,11 @@ Builder setFileSeparatorChar(final char fileSeparatorChar) { */ private static final int MAX_PATHNAME_LENGTH = 4096; + /** + * Default charset for decoding filenames. 
+ */ + private static final Charset DEFAULT_CHARSET = StandardCharsets.US_ASCII; + private final char fileSeparatorChar; private LhaArchiveEntry currentEntry; private InputStream currentCompressedStream; @@ -230,9 +195,9 @@ public static Builder builder() { return new Builder(); } - private LhaArchiveInputStream(final InputStream inputStream, final Charset charset, final char fileSeparatorChar) { - super(inputStream, charset); - this.fileSeparatorChar = fileSeparatorChar; + private LhaArchiveInputStream(final Builder builder) throws IOException { + super(builder.getInputStream(), builder.getCharset()); + this.fileSeparatorChar = builder.fileSeparatorChar; } @Override diff --git a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java index c3246516f8b..defb2ac60a8 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java @@ -97,7 +97,7 @@ public String toString() { */ private static final String ARJ_DEFAULT; private static final String DUMP_DEFAULT; - private static final String LHA_DEFAULT = getCharsetName(LhaArchiveInputStream.builder().get()); + private static final String LHA_DEFAULT = getCharsetName(LhaArchiveInputStream.builder().setByteArray(ArrayUtils.EMPTY_BYTE_ARRAY)); private static final String ZIP_DEFAULT = getCharsetName(new ZipArchiveInputStream(null)); private static final String CPIO_DEFAULT = getCharsetName(new CpioArchiveInputStream(null)); private static final String TAR_DEFAULT = getCharsetName(new TarArchiveInputStream(null)); @@ -168,6 +168,14 @@ private static String getCharsetName(final ArchiveInputStream inputStream) { return inputStream.getCharset().name(); } + private static String getCharsetName(final ArchiveInputStream.AbstractBuilder builder) { + try { + return builder.get().getCharset().name(); + } 
catch (final IOException e) { + throw new RuntimeException(e); + } + } + @SuppressWarnings("deprecation") // test of deprecated method static ArchiveStreamFactory getFactory(final String entryEncoding) { final ArchiveStreamFactory fac = new ArchiveStreamFactory();

0UfBYFz3%MlnmnvG@|IVO8f%gStJ!Ce#S~9x5rWK^TU0&y_F5$Qzhvo=z0~gt zcW?Y`C8~V=INtK+K7IGlA>H{_QqF@0Z!4ugq@k(Y{}6^)#y5(_qkC3N`KzCzFoqzA zYz14Gp&cWtR_ng3p%RwOFaC0T=QdVmlQ5c9?vQ#r#+v_1R)?0sLN$RFNULLb*0iim zRal$d@LuC=IPJFjmroq)N36E1=s+uY{`l1HuveL9J;xjD2g->%awsX&lEWJQddIsC z%T`U`^hnipXU={+T~QUMT{=Nlb6^@|7O518>o>8=^~dg#=*9)Am1ar8@{8h?i068z z*a!BVin=-A*gaCDZr?(kQ2A`BsF*nIP{&c1tk`Z*N^Nv0fa34xdEvS}>4K|1<~a2~ z_-vRNN?2b1vR+*grpVYIuEwx1Zpxs;WYIgnyOp|;;R4dtv4I02Oa8mRd*;(dUe{LR zEg?UY7n?Eg^0yeAd_D4~D}t-!SONK!auJw?Y;9Eh7?f>|9b!$j5~9rd?b3nVG=r01 zWS0{#@y8T**ycQo-CZk?$oU#^YTRK6O-n#w>sRGE6iE)H@7F4Ph{Wz6-J)Nn3LBNL ztQin_tQ?&v*mCS{?a@wUPt)9}t&(>zOP~JHZbTLnzHFO6NB71KPH3KhMVis^pEhHn z7~Now))%>U=7kI0dsnR5C}>!c_%yy=shCy5jaR>xFZ}0-PMKA5-(ZMr#VVfXCqWZ* z``BxanN$;1W88ezRE)C16J)kg&QeX~Xavw`1y{u%dn<$wUDn-NZpr_c-Wi`M_ugt)PzlLnoaPy;;?y2>mERf<-A= z41kc?-b97FzPW+->yQnN>f^mw7ASt{mEA?%Bb+Qy(R|^06CdTz?csaF?X|$HEA}L|w}7|Tq=MbgOBB30KqAHwr-n}ZDx^y5-+lZuyZ$k^ z?-C|#Kq@>cRAfNCq`FE?9F{z)3OUiaVm3Vlr$BJ@z%UpTdys|3= z!i5VtSfYxdV-4Of!c4#U{BLUQM860dwThFIoL6&c8Z*;L1b~+vM8Wsmaq^by`)mJsNLapT;uRl^b`w2%&$q77VA=cY!bHh>P^CX}1gNMbl_!H2Q5>;v zn=|v!gc|a9(c)He`G9S@!kh-{EqeSU} zpjh)fMn(Dhre^j}f-IYG)2UmJEB0g!87Y)cM#gS>45aPx5L#hfL&(ykk8>!^tL?Gf zQ#X%m3pN;}uLTe0n_xasf|%hehT{(gkTD)Q6t5HK-c^GM;0J1c&Kop|h#_pTwBark za21wKd-Bkx$K^AlyG`ZM)shzg=gT|N`}GDcC{8I} z<~UUuLQkkBTTy+&(=}EaIunf#1UpnWc0DPZ&K8Kv+x<*xzZi=7=ESDJE@t7r8{>A7 zqWHrovwyI$Qp=V#IIX%83Dy!iRqt-tidEw2Q-3;7r5Ov`Oqr2jZL!o_Pd-)S78VJ( zsv_lxJ2$3&j7~lSjzpKDpJca8W}EG$tHs5UqliDW-v@Le!h7TZ7N%AWuPyWavJO45 zk?%GeNVdsEQbGw&pDBRwXA=JbebdthcuBU0n~ z{*k0tXmfY}U{vIr6{@ryxN_oh{ga0~RS9vKNz%CJ;Z5wb*E`Kc7u%*R} zR1j!c3$|v;2GHq43-_hV57S{%4IveDjK$Rn$s$1VSt2SnU6W=dN>VGQe|?1QoVcA) zaeFJ;+Ol~J_BpToXb(C(ytR3~E>T?h01EwWSmY%eb;|B0u(=0rYYc)wSdQZwxoP?T zeyGhKtv8kOy1kSZ{>WJ$x33x<3f!{L;~Hk&7CdbArv{=m!ww;Xh2Z#T_T+B z#~+Gumy8*&>L9tHVgAR===jW^wp2DQHG^bKoh%2Zi8dS0n&#c#8YxV#AaOunA&bUI zE41TXc6@4Y^QgG6agUVpVfXt$vbu0 z$+B)dn7_q!?qzkvKN8A66@u5VGmiun&qpWCxW|m`FH2X?1YtE) 
zf4B{zIz{0`!Izw62WJXXGiy4|+aMu!&C+b2}von|~KM;@gtAJdSpC;4Oo!Gt-%i8C)P z75w%dE-^t6RII4ed`ko_UucZ%UkD_{{I+?KXT*&vo(1_M%nds>Fp3|7r{I!7NT)`e z5Sx6Wl{tO#`sJQhwci?%th!&c#otQZ%uJ}MAdnVFlTzvO{4k0K&URWO75g&iZAM!k zS3tZn@`o83cyZ!|^`VS*k})rB?yufJN{j2$hQz+I;JO}IP8xwiDQ&eU+@Or$n4=)O z)!M>DA3~YRZgK(JD=~Sz7`Cfx0;F19>%CX*E00j{V_9{uy!&iWu;y0A5L9w6Ck4(_ zUi5X!A3Yf4A+fR)_EMZ(sj!#cIos3BMp)_+(XE-+!+`VoCoX$B3Ckt;{Tj(mSMa)F hOXBPw+TZY)SLGM)f?#Y0u2UXka`6MQDpTcsl_FRdLq>dI8bB& literal 0 HcmV?d00001 diff --git a/src/test/resources/test-macos-l1.lha b/src/test/resources/test-macos-l1.lha new file mode 100644 index 0000000000000000000000000000000000000000..95401c661d41380717bd320c5301aa62b102e02d GIT binary patch literal 343 zcmb2l*3HRC(UoR^fc#9}Xazne DEx1wa literal 0 HcmV?d00001 diff --git a/src/test/resources/test-msdos-l0.lha b/src/test/resources/test-msdos-l0.lha new file mode 100644 index 0000000000000000000000000000000000000000..175389c96161d812ed510f8bcf3dab84d259d3e0 GIT binary patch literal 220 zcmb21tecaOqRRjUDIvVk0t~DvnMH;%3=B$_P$Y}Ml00BZ5TOfFqrD!e#z2=3XaEQY z0o5olh$5>gNi8lh)GMhd*>ch&H76%uAw0h*CqJ$yhM1lGe@l3HA1s8>=^vgM>NhGvEhCJYQ7sX00M r3gP)hIVp;~T#^weHkV+x*$D31O^QI9<#E|;gw1AXlTKurfF(GO#i*>=t2Q@JP+c Q$yW%^FUm<#GSs5Gz7+4t?vOF0W%utnAASvgE zDhKJ&Md*6|nt{Ozq{~2;4`>Mp+kka#IqA#8z*v%6TwMUj^a&GuScwj06Se5! 
Date: Sat, 23 Aug 2025 11:50:25 +0200 Subject: [PATCH 03/22] Do not use Optional --- .../archivers/lha/LhaArchiveEntry.java | 53 +- .../archivers/lha/LhaArchiveInputStream.java | 27 +- .../archivers/lha/LhaArchiveEntryTest.java | 13 +- .../lha/LhaArchiveInputStreamTest.java | 468 +++++++++--------- 4 files changed, 279 insertions(+), 282 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java index 1d158e24fed..6ea8868f833 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java @@ -21,7 +21,6 @@ import java.time.ZoneOffset; import java.util.Date; -import java.util.Optional; import org.apache.commons.compress.archivers.ArchiveEntry; @@ -38,12 +37,12 @@ public class LhaArchiveEntry implements ArchiveEntry { private long compressedSize; private String compressionMethod; private int crcValue; - private Optional osId = Optional.empty(); - private Optional unixPermissionMode = Optional.empty(); - private Optional unixUserId = Optional.empty(); - private Optional unixGroupId = Optional.empty(); - private Optional msdosFileAttributes = Optional.empty(); - private Optional headerCrc = Optional.empty(); + private Integer osId; + private Integer unixPermissionMode; + private Integer unixUserId; + private Integer unixGroupId; + private Integer msdosFileAttributes; + private Integer headerCrc; public LhaArchiveEntry() { } @@ -59,20 +58,20 @@ public String toString() { .append(",compressionMethod=").append(compressionMethod) .append(",crcValue=").append(String.format("0x%04x", crcValue)); - if (osId.isPresent()) { - sb.append(",osId=").append(osId.get()); + if (osId != null) { + sb.append(",osId=").append(osId); } - if (unixPermissionMode.isPresent()) { - sb.append(",unixPermissionMode=").append(String.format("%03o", 
unixPermissionMode.get())); + if (unixPermissionMode != null) { + sb.append(",unixPermissionMode=").append(String.format("%03o", unixPermissionMode)); } - if (msdosFileAttributes.isPresent()) { - sb.append(",msdosFileAttributes=").append(String.format("%04x", msdosFileAttributes.get())); + if (msdosFileAttributes != null) { + sb.append(",msdosFileAttributes=").append(String.format("%04x", msdosFileAttributes)); } - if (headerCrc.isPresent()) { - sb.append(",headerCrc=").append(String.format("0x%04x", headerCrc.get())); + if (headerCrc != null) { + sb.append(",headerCrc=").append(String.format("0x%04x", headerCrc)); } return sb.append("]").toString(); @@ -158,35 +157,35 @@ public void setCrcValue(int crc) { * * @return operating system id if available */ - public Optional getOsId() { + public Integer getOsId() { return osId; } - public void setOsId(Optional osId) { + public void setOsId(Integer osId) { this.osId = osId; } - public Optional getUnixPermissionMode() { + public Integer getUnixPermissionMode() { return unixPermissionMode; } - public void setUnixPermissionMode(Optional unixPermissionMode) { + public void setUnixPermissionMode(Integer unixPermissionMode) { this.unixPermissionMode = unixPermissionMode; } - public Optional getUnixUserId() { + public Integer getUnixUserId() { return unixUserId; } - public void setUnixUserId(Optional unixUserId) { + public void setUnixUserId(Integer unixUserId) { this.unixUserId = unixUserId; } - public Optional getUnixGroupId() { + public Integer getUnixGroupId() { return unixGroupId; } - public void setUnixGroupId(Optional unixGroupId) { + public void setUnixGroupId(Integer unixGroupId) { this.unixGroupId = unixGroupId; } @@ -195,22 +194,22 @@ public void setUnixGroupId(Optional unixGroupId) { * * @return MS-DOS file attributes if available */ - public Optional getMsdosFileAttributes() { + public Integer getMsdosFileAttributes() { return msdosFileAttributes; } - public void setMsdosFileAttributes(Optional 
msdosFileAttributes) { + public void setMsdosFileAttributes(Integer msdosFileAttributes) { this.msdosFileAttributes = msdosFileAttributes; } /** * Don't expose the header CRC publicly, as it is of no interest to most users. */ - Optional getHeaderCrc() { + Integer getHeaderCrc() { return headerCrc; } - void setHeaderCrc(Optional headerCrc) { + void setHeaderCrc(Integer headerCrc) { this.headerCrc = headerCrc; } } diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index cd79abd9c35..4f870fa5930 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -29,7 +29,6 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; -import java.util.Optional; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveException; @@ -312,7 +311,7 @@ protected LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException entry.setDirectory(isDirectory(entry.getCompressionMethod())); entry.setCrcValue(Short.toUnsignedInt(buffer.getShort())); - entry.setOsId(Optional.of(Byte.toUnsignedInt(buffer.get()))); + entry.setOsId(Byte.toUnsignedInt(buffer.get())); if (calculateHeaderChecksum(buffer) != baseHeaderChecksum) { throw new ArchiveException("Invalid header level 1 checksum"); @@ -337,11 +336,11 @@ protected LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException entry.setCompressedSize(skipSize); - if (entry.getHeaderCrc().isPresent()) { + if (entry.getHeaderCrc() != null) { // Calculate CRC16 of full header final long headerCrc = calculateCRC16(headerParts.toArray(new ByteBuffer[headerParts.size()])); - if (headerCrc != entry.getHeaderCrc().get()) { - throw new ArchiveException("Invalid header CRC expected=0x%04x found=0x%04x", 
headerCrc, entry.getHeaderCrc().get()); + if (headerCrc != entry.getHeaderCrc()) { + throw new ArchiveException("Invalid header CRC expected=0x%04x found=0x%04x", headerCrc, entry.getHeaderCrc()); } } @@ -373,7 +372,7 @@ protected LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException entry.setName(""); entry.setDirectory(isDirectory(entry.getCompressionMethod())); entry.setCrcValue(Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_CRC))); - entry.setOsId(Optional.of(Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_2_OFFSET_OS_ID)))); + entry.setOsId(Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_2_OFFSET_OS_ID))); int extendedHeaderOffset = HEADER_LEVEL_2_OFFSET_FIRST_EXTENDED_HEADER_SIZE; int nextHeaderSize = Short.toUnsignedInt(buffer.getShort(extendedHeaderOffset)); @@ -388,11 +387,11 @@ protected LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException nextHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); } - if (entry.getHeaderCrc().isPresent()) { + if (entry.getHeaderCrc() != null) { // Calculate CRC16 of full header final long headerCrc = calculateCRC16(buffer); - if (headerCrc != entry.getHeaderCrc().get()) { - throw new ArchiveException("Invalid header CRC expected=0x%04x found=0x%04x", headerCrc, entry.getHeaderCrc().get()); + if (headerCrc != entry.getHeaderCrc()) { + throw new ArchiveException("Invalid header CRC expected=0x%04x found=0x%04x", headerCrc, entry.getHeaderCrc()); } } @@ -526,7 +525,7 @@ protected void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final final int crcPos = extendedHeaderBuffer.position(); // Save the current position to be able to set the header CRC later // Header CRC - entry.setHeaderCrc(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + entry.setHeaderCrc(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); // Set header CRC to zero to be able to later compute the CRC of the full header 
extendedHeaderBuffer.putShort(crcPos, (short) 0); @@ -566,14 +565,14 @@ protected void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final } } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_MSDOS_FILE_ATTRIBUTES) { // MS-DOS file attributes - entry.setMsdosFileAttributes(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + entry.setMsdosFileAttributes(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_PERMISSION) { // UNIX file permission - entry.setUnixPermissionMode(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + entry.setUnixPermissionMode(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_UID_GID) { // UNIX group/user ID - entry.setUnixGroupId(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); - entry.setUnixUserId(Optional.of(Short.toUnsignedInt(extendedHeaderBuffer.getShort()))); + entry.setUnixGroupId(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); + entry.setUnixUserId(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_TIMESTAMP) { // UNIX last modified time entry.setLastModifiedDate(new Date(Integer.toUnsignedLong(extendedHeaderBuffer.getInt()) * 1000)); diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java index 0c51e59a107..428cd165278 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java @@ -22,7 +22,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Date; -import java.util.Optional; import org.junit.jupiter.api.Test; @@ -52,12 +51,12 @@ void testToStringAllFields() { entry.setCompressedSize(52); 
entry.setCompressionMethod("-lh5-"); entry.setCrcValue(0x6496); - entry.setOsId(Optional.of(85)); - entry.setUnixPermissionMode(Optional.of(0100644)); - entry.setUnixGroupId(Optional.of(20)); - entry.setUnixUserId(Optional.of(501)); - entry.setMsdosFileAttributes(Optional.of(0x0010)); - entry.setHeaderCrc(Optional.of(0xb772)); + entry.setOsId(85); + entry.setUnixPermissionMode(0100644); + entry.setUnixGroupId(20); + entry.setUnixUserId(501); + entry.setMsdosFileAttributes(0x0010); + entry.setHeaderCrc(0xb772); assertEquals( "LhaArchiveEntry[name=dir1/test1.txt,directory=false,size=57,lastModifiedDate=2025-08-03T16:02:22Z,compressedSize=52," + diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java index 691d81d3655..ed0ab0bb23b 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -177,12 +177,12 @@ void testParseHeaderLevel0File() throws IOException { assertEquals(52, entry.getCompressedSize()); assertEquals("-lh5-", entry.getCompressionMethod()); assertEquals(0x6496, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -274,12 +274,12 @@ void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { assertEquals(0, 
entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -292,12 +292,12 @@ void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -310,12 +310,12 @@ void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { assertEquals(13, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0x7757, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - 
assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -328,12 +328,12 @@ void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -346,12 +346,12 @@ void testParseHeaderLevel0FileWithFoldersMacos() throws IOException { assertEquals(13, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0x7757, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + 
assertNull(entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -375,12 +375,12 @@ void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -393,12 +393,12 @@ void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -411,12 +411,12 @@ void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - 
assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -429,12 +429,12 @@ void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -447,12 +447,12 @@ void testParseHeaderLevel0FileWithFoldersMsdos() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertFalse(entry.getOsId().isPresent()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + 
assertNull(entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -473,12 +473,12 @@ void testParseHeaderLevel1File() throws IOException { assertEquals(52, entry.getCompressedSize()); assertEquals("-lh5-", entry.getCompressionMethod()); assertEquals(0x6496, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(0100644, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertEquals(85, entry.getOsId()); + assertEquals(0100644, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -503,12 +503,12 @@ void testParseHeaderLevel1FileMsdosChecksumAndCrc() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertEquals(0x0010, entry.getMsdosFileAttributes().get()); - assertEquals(0xb772, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertEquals(0x0010, entry.getMsdosFileAttributes()); + assertEquals(0xb772, entry.getHeaderCrc()); // Check file entry entry = 
archive.getNextEntry(); @@ -521,12 +521,12 @@ void testParseHeaderLevel1FileMsdosChecksumAndCrc() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x9b71, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x9b71, entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -594,12 +594,12 @@ void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(040755, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertEquals(85, entry.getOsId()); + assertEquals(040755, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -612,12 +612,12 @@ void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(85, 
entry.getOsId().get()); - assertEquals(040755, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertEquals(85, entry.getOsId()); + assertEquals(040755, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -630,12 +630,12 @@ void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { assertEquals(13, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0x7757, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(0100644, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertEquals(85, entry.getOsId()); + assertEquals(0100644, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -648,12 +648,12 @@ void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(040755, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - 
assertFalse(entry.getHeaderCrc().isPresent()); + assertEquals(85, entry.getOsId()); + assertEquals(040755, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -666,12 +666,12 @@ void testParseHeaderLevel1FileWithFoldersMacos() throws IOException { assertEquals(13, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0x7757, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(0100644, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertFalse(entry.getHeaderCrc().isPresent()); + assertEquals(85, entry.getOsId()); + assertEquals(0100644, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertNull(entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -695,12 +695,12 @@ void testParseHeaderLevel1FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertEquals(0x0010, entry.getMsdosFileAttributes().get()); - assertEquals(0xd458, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertEquals(0x0010, entry.getMsdosFileAttributes()); + 
assertEquals(0xd458, entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -713,12 +713,12 @@ void testParseHeaderLevel1FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertEquals(0x0010, entry.getMsdosFileAttributes().get()); - assertEquals(0x40de, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertEquals(0x0010, entry.getMsdosFileAttributes()); + assertEquals(0x40de, entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -731,12 +731,12 @@ void testParseHeaderLevel1FileWithFoldersMsdos() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x34b0, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x34b0, entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -749,12 +749,12 @@ void testParseHeaderLevel1FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", 
entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertEquals(0x0010, entry.getMsdosFileAttributes().get()); - assertEquals(0x21b2, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertEquals(0x0010, entry.getMsdosFileAttributes()); + assertEquals(0x21b2, entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -767,12 +767,12 @@ void testParseHeaderLevel1FileWithFoldersMsdos() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x8f0c, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x8f0c, entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -793,12 +793,12 @@ void testParseHeaderLevel2File() throws IOException { assertEquals(52, entry.getCompressedSize()); assertEquals("-lh5-", entry.getCompressionMethod()); assertEquals(0x6496, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(0100644, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - 
assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x01a5, entry.getHeaderCrc().get()); + assertEquals(85, entry.getOsId()); + assertEquals(0100644, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x01a5, entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -854,12 +854,12 @@ void testParseHeaderLevel2FileWithFoldersAmiga() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertEquals(65, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0xe1a5, entry.getHeaderCrc().get()); + assertEquals(65, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0xe1a5, entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -872,12 +872,12 @@ void testParseHeaderLevel2FileWithFoldersAmiga() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertEquals(65, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0xd6b0, entry.getHeaderCrc().get()); + assertEquals(65, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + 
assertNull(entry.getMsdosFileAttributes()); + assertEquals(0xd6b0, entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -901,12 +901,12 @@ void testParseHeaderLevel2FileWithFoldersMacos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(040755, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0xf3f7, entry.getHeaderCrc().get()); + assertEquals(85, entry.getOsId()); + assertEquals(040755, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0xf3f7, entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -919,12 +919,12 @@ void testParseHeaderLevel2FileWithFoldersMacos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(040755, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x50d3, entry.getHeaderCrc().get()); + assertEquals(85, entry.getOsId()); + assertEquals(040755, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x50d3, entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -937,12 +937,12 @@ void testParseHeaderLevel2FileWithFoldersMacos() throws IOException { 
assertEquals(13, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0x7757, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(0100644, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x589e, entry.getHeaderCrc().get()); + assertEquals(85, entry.getOsId()); + assertEquals(0100644, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x589e, entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -955,12 +955,12 @@ void testParseHeaderLevel2FileWithFoldersMacos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(040755, entry.getUnixPermissionMode().get()); - assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x126d, entry.getHeaderCrc().get()); + assertEquals(85, entry.getOsId()); + assertEquals(040755, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x126d, entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -973,12 +973,12 @@ void testParseHeaderLevel2FileWithFoldersMacos() throws IOException { assertEquals(13, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0x7757, entry.getCrcValue()); - assertEquals(85, entry.getOsId().get()); - assertEquals(0100644, entry.getUnixPermissionMode().get()); - 
assertEquals(20, entry.getUnixGroupId().get()); - assertEquals(501, entry.getUnixUserId().get()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0xdbdd, entry.getHeaderCrc().get()); + assertEquals(85, entry.getOsId()); + assertEquals(0100644, entry.getUnixPermissionMode()); + assertEquals(20, entry.getUnixGroupId()); + assertEquals(501, entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0xdbdd, entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -1002,12 +1002,12 @@ void testParseHeaderLevel2FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertEquals(0x0010, entry.getMsdosFileAttributes().get()); - assertEquals(0x496a, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertEquals(0x0010, entry.getMsdosFileAttributes()); + assertEquals(0x496a, entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -1020,12 +1020,12 @@ void testParseHeaderLevel2FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertEquals(0x0010, entry.getMsdosFileAttributes().get()); - assertEquals(0xebe7, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); 
+ assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertEquals(0x0010, entry.getMsdosFileAttributes()); + assertEquals(0xebe7, entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ -1038,12 +1038,12 @@ void testParseHeaderLevel2FileWithFoldersMsdos() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x214a, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x214a, entry.getHeaderCrc()); // Check directory entry entry = archive.getNextEntry(); @@ -1056,12 +1056,12 @@ void testParseHeaderLevel2FileWithFoldersMsdos() throws IOException { assertEquals(0, entry.getCompressedSize()); assertEquals("-lhd-", entry.getCompressionMethod()); assertEquals(0x0000, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertEquals(0x0010, entry.getMsdosFileAttributes().get()); - assertEquals(0x74ca, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertEquals(0x0010, entry.getMsdosFileAttributes()); + assertEquals(0x74ca, entry.getHeaderCrc()); // Check file entry entry = archive.getNextEntry(); @@ 
-1074,12 +1074,12 @@ void testParseHeaderLevel2FileWithFoldersMsdos() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertFalse(entry.getMsdosFileAttributes().isPresent()); - assertEquals(0x165f, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertNull(entry.getMsdosFileAttributes()); + assertEquals(0x165f, entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -1101,12 +1101,12 @@ void testParseHeaderLevel2FileWithMsdosAttributes() throws IOException { assertEquals(14, entry.getCompressedSize()); assertEquals("-lh0-", entry.getCompressionMethod()); assertEquals(0xc9b4, entry.getCrcValue()); - assertEquals(77, entry.getOsId().get()); - assertFalse(entry.getUnixPermissionMode().isPresent()); - assertFalse(entry.getUnixGroupId().isPresent()); - assertFalse(entry.getUnixUserId().isPresent()); - assertEquals(0x0021, entry.getMsdosFileAttributes().get()); - assertEquals(0x14bb, entry.getHeaderCrc().get()); + assertEquals(77, entry.getOsId()); + assertNull(entry.getUnixPermissionMode()); + assertNull(entry.getUnixGroupId()); + assertNull(entry.getUnixUserId()); + assertEquals(0x0021, entry.getMsdosFileAttributes()); + assertEquals(0x14bb, entry.getHeaderCrc()); // No more entries expected assertNull(archive.getNextEntry()); @@ -1118,7 +1118,7 @@ void testParseExtendedHeaderCommon() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { final LhaArchiveEntry entry = new LhaArchiveEntry(); 
archive.parseExtendedHeader(toByteBuffer(0x00, 0x22, 0x33, 0x00, 0x00), entry); - assertEquals(0x3322, entry.getHeaderCrc().get()); + assertEquals(0x3322, entry.getHeaderCrc()); } } @@ -1176,8 +1176,8 @@ void testParseExtendedHeaderUnixPermission() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { final LhaArchiveEntry entry = new LhaArchiveEntry(); archive.parseExtendedHeader(toByteBuffer(0x50, 0xa4, 0x81, 0x00, 0x00), entry); - assertEquals(0x81a4, entry.getUnixPermissionMode().get()); - assertEquals(0100644, entry.getUnixPermissionMode().get()); + assertEquals(0x81a4, entry.getUnixPermissionMode()); + assertEquals(0100644, entry.getUnixPermissionMode()); } } @@ -1186,8 +1186,8 @@ void testParseExtendedHeaderUnixUidGid() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { final LhaArchiveEntry entry = new LhaArchiveEntry(); archive.parseExtendedHeader(toByteBuffer(0x51, 0x14, 0x00, 0xf5, 0x01, 0x00, 0x00), entry); - assertEquals(0x0014, entry.getUnixGroupId().get()); - assertEquals(0x01f5, entry.getUnixUserId().get()); + assertEquals(0x0014, entry.getUnixGroupId()); + assertEquals(0x01f5, entry.getUnixUserId()); } } @@ -1205,7 +1205,7 @@ void testParseExtendedHeaderMSdosFileAttributes() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { final LhaArchiveEntry entry = new LhaArchiveEntry(); archive.parseExtendedHeader(toByteBuffer(0x40, 0x10, 0x00, 0x00), entry); - assertEquals(0x10, entry.getMsdosFileAttributes().get()); + assertEquals(0x10, entry.getMsdosFileAttributes()); } } From b6ee9eca57eb70cb7455fe3ec456f13fb1928772 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 23 Aug 2025 12:36:15 +0200 Subject: [PATCH 04/22] Throw CompressorException for unexpected end of stream --- ...tLhStaticHuffmanCompressorInputStream.java | 51 
+++++++++++++------ ...taticHuffmanCompressorInputStreamTest.java | 24 ++++++++- 2 files changed, 59 insertions(+), 16 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java index 8a314361327..3ff5d7e2dfd 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java @@ -205,13 +205,13 @@ private void fillBuffer() throws IOException { */ protected BinaryTree readCommandDecodingTree() throws IOException { // Number of code lengths to read - final int numCodeLengths = (int) bin.readBits(COMMAND_DECODING_LENGTH_BITS); + final int numCodeLengths = readBits(COMMAND_DECODING_LENGTH_BITS); if (numCodeLengths > MAX_NUMBER_OF_COMMAND_DECODING_CODE_LENGTHS) { throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, MAX_NUMBER_OF_COMMAND_DECODING_CODE_LENGTHS); } else if (numCodeLengths == 0) { // If numCodeLengths is zero, we read a single code length of COMMAND_DECODING_LENGTH_BITS bits and use as root of the tree - return new BinaryTree(new int[] { (int) bin.readBits(COMMAND_DECODING_LENGTH_BITS) }); + return new BinaryTree(new int[] { readBits(COMMAND_DECODING_LENGTH_BITS) }); } else { // Read all code lengths final int[] codeLengths = new int[numCodeLengths]; @@ -220,7 +220,7 @@ protected BinaryTree readCommandDecodingTree() throws IOException { if (index == 2) { // After reading the first three code lengths, we read a 2-bit skip range - index += (int) bin.readBits(2); + index += readBits(2); } } @@ -236,15 +236,19 @@ protected BinaryTree readCommandDecodingTree() throws IOException { * @throws IOException if an I/O error occurs */ protected int readCodeLength() throws IOException 
{ - int len = (int) bin.readBits(CODE_LENGTH_BITS); + int len = readBits(CODE_LENGTH_BITS); if (len == 0x07) { - // Count the number of following consecutive one bits - while (bin.readBit() == 1) { - if (len == MAX_CODE_LENGTH) { + int bit = bin.readBit(); + while (bit == 1) { + if (++len > MAX_CODE_LENGTH) { throw new CompressorException("Code length overflow"); } - len++; + bit = bin.readBit(); + } + + if (bit == -1) { + throw new CompressorException("Unexpected end of stream"); } } @@ -259,13 +263,13 @@ protected int readCodeLength() throws IOException { * @throws IOException if an I/O error occurs */ protected BinaryTree readCommandTree(final BinaryTree commandDecodingTree) throws IOException { - final int numCodeLengths = (int) bin.readBits(COMMAND_TREE_LENGTH_BITS); + final int numCodeLengths = readBits(COMMAND_TREE_LENGTH_BITS); if (numCodeLengths > getMaxNumberOfCommands()) { throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, getMaxNumberOfCommands()); } else if (numCodeLengths == 0) { // If numCodeLengths is zero, we read a single code length of COMMAND_TREE_LENGTH_BITS bits and use as root of the tree - return new BinaryTree(new int[] { (int) bin.readBits(COMMAND_TREE_LENGTH_BITS) }); + return new BinaryTree(new int[] { readBits(COMMAND_TREE_LENGTH_BITS) }); } else { // Read all code lengths final int[] codeLengths = new int[numCodeLengths]; @@ -278,10 +282,10 @@ protected BinaryTree readCommandTree(final BinaryTree commandDecodingTree) throw index++; } else if (codeOrSkipRange == 1) { // Skip a range of code lengths, read 4 bits to determine how many to skip - index += (int) bin.readBits(4) + 3; + index += readBits(4) + 3; } else if (codeOrSkipRange == 2) { // Skip a range of code lengths, read 9 bits to determine how many to skip - index += (int) bin.readBits(9) + 20; + index += readBits(9) + 20; } else { // Subtract 2 from the codeOrSkipRange to get the code length codeLengths[index++] = codeOrSkipRange - 2; 
@@ -300,13 +304,13 @@ protected BinaryTree readCommandTree(final BinaryTree commandDecodingTree) throw */ private BinaryTree readDistanceTree() throws IOException { // Number of code lengths to read - final int numCodeLengths = (int) bin.readBits(getDistanceBits()); + final int numCodeLengths = readBits(getDistanceBits()); if (numCodeLengths > getDistanceCodeSize()) { throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, getDistanceCodeSize()); } else if (numCodeLengths == 0) { // If numCodeLengths is zero, we read a single code length of getDistanceBits() bits and use as root of the tree - return new BinaryTree(new int[] { (int) bin.readBits(getDistanceBits()) }); + return new BinaryTree(new int[] { readBits(getDistanceBits()) }); } else { // Read all code lengths final int[] codeLengths = new int[numCodeLengths]; @@ -334,11 +338,28 @@ private int readDistance() throws IOException { return bits; } else { // Bits minus one is the number of bits to read for the distance - final int value = (int) bin.readBits(bits - 1); + final int value = readBits(bits - 1); // Add the implicit bit (1 << (bits - 1)) to the value read from the stream giving the distance. // E.g. if bits is 6, we read 5 bits giving value 8 and then we add 32 giving a distance of 40. return value | (1 << (bits - 1)); } } + + /** + * Read the specified number of bits from the underlying stream throwing CompressorException + * if the end of the stream is reached before reading the requested number of bits. + * + * @param count the number of bits to read + * @return the bits concatenated as an int using the stream's byte order + * @throws IOException if an I/O error occurs. 
+ */ + private int readBits(final int count) throws IOException { + final long value = bin.readBits(count); + if (value < 0) { + throw new CompressorException("Unexpected end of stream"); + } + + return (int) value; + } } diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStreamTest.java index 278090d7763..e8ea1d55dc9 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStreamTest.java @@ -96,6 +96,18 @@ void testReadCommandTreeWithInvalidSize() throws IOException { } } + @Test + void testReadCommandTreeUnexpectedEndOfStream() throws IOException { + try { + createLh5CompressorInputStream( + 0b00000000, 0b01111111 // 9 bits length (0x00) and only 8 bits instead of expected 9 bits which will cause an unexpected end of stream + ).readCommandTree(new BinaryTree(new int [] { 0 })); + fail("Expected CompressorException for unexpected end of stream"); + } catch (CompressorException e) { + assertEquals("Unexpected end of stream", e.getMessage()); + } + } + @Test void testReadCodeLength() throws IOException { assertEquals(0, createLh5CompressorInputStream(0x00, 0x00).readCodeLength()); // 0000 0000 0000 0000 @@ -117,13 +129,23 @@ void testReadCodeLength() throws IOException { assertEquals(16, createLh5CompressorInputStream(0xff, 0xf0).readCodeLength()); // 1111 1111 1111 0000 try { - assertEquals(17, createLh5CompressorInputStream(0xff, 0xf8).readCodeLength()); // 1111 1111 1111 1000 + createLh5CompressorInputStream(0xff, 0xf8).readCodeLength(); // 1111 1111 1111 1000 fail("Expected CompressorException for code length overflow"); } catch (CompressorException e) { assertEquals("Code length overflow", e.getMessage()); } } + @Test + void 
testReadCodeLengthUnexpectedEndOfStream() throws IOException { + try { + createLh5CompressorInputStream(0xff).readCodeLength(); // 1111 1111 EOF + fail("Expected CompressorException for unexpected end of stream"); + } catch (CompressorException e) { + assertEquals("Unexpected end of stream", e.getMessage()); + } + } + private Lh5CompressorInputStream createLh5CompressorInputStream(final int... data) throws IOException { final byte[] bytes = new byte[data.length]; for (int i = 0; i < data.length; i++) { From d1955cbe1becff07b4ad46e56e8ede167bb6fd15 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 23 Aug 2025 15:05:02 +0200 Subject: [PATCH 05/22] Use IOUtils.read to fill the entire buffer --- .../compress/archivers/lha/LhaArchiveInputStream.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index 4f870fa5930..77180855bba 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -215,8 +215,8 @@ public LhaArchiveEntry getNextEntry() throws IOException { protected LhaArchiveEntry readHeader() throws IOException { // Header level is not known yet. Read the minimum length header. 
final byte[] buffer = new byte[HEADER_GENERIC_MINIMUM_HEADER_LENGTH]; - final int len = in.read(buffer); - if (len == -1) { + final int len = IOUtils.read(in, buffer); + if (len == 0) { // EOF return null; } else if (len == 1 && buffer[0] == 0) { @@ -486,7 +486,7 @@ protected String getPathname(final ByteBuffer buffer, final int pathnameLength) */ private ByteBuffer readRemainingHeaderData(final ByteBuffer currentHeader, final int headerSize) throws IOException { final byte[] remainingData = new byte[headerSize - currentHeader.capacity()]; - final int len = in.read(remainingData); + final int len = IOUtils.read(in, remainingData); if (len != remainingData.length) { throw new ArchiveException("Error reading remaining header"); } @@ -503,7 +503,7 @@ private ByteBuffer readRemainingHeaderData(final ByteBuffer currentHeader, final */ private ByteBuffer readExtendedHeader(final int headerSize) throws IOException { final byte[] extensionHeader = new byte[headerSize]; - final int len = in.read(extensionHeader); + final int len = IOUtils.read(in, extensionHeader); if (len != extensionHeader.length) { throw new ArchiveException("Error reading extended header"); } From e27f360236fe7504b9a0f9a6c8de79302c51ccc5 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 23 Aug 2025 15:13:07 +0200 Subject: [PATCH 06/22] Add javadoc @since 1.29.0 --- .../apache/commons/compress/archivers/ArchiveStreamFactory.java | 2 +- .../apache/commons/compress/archivers/lha/LhaArchiveEntry.java | 2 +- .../commons/compress/archivers/lha/LhaArchiveInputStream.java | 2 +- .../compress/compressors/lha/Lh4CompressorInputStream.java | 2 +- .../compress/compressors/lha/Lh5CompressorInputStream.java | 2 +- .../compress/compressors/lha/Lh6CompressorInputStream.java | 2 +- .../compress/compressors/lha/Lh7CompressorInputStream.java | 2 +- .../java/org/apache/commons/compress/utils/CircularBuffer.java | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git 
a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index 4ec0eed61cb..d2a2db99db8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -180,7 +180,7 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider { /** * Constant (value {@value}) used to identify the LHA archive format. * Not supported as an output stream type. - * @since 1.29 + * @since 1.29.0 */ public static final String LHA = "lha"; diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java index 6ea8868f833..9aa7b977020 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java @@ -27,7 +27,7 @@ /** * Represents an entry in a LHA archive. 
* - * @since 1.29 + * @since 1.29.0 */ public class LhaArchiveEntry implements ArchiveEntry { private String name; diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index 77180855bba..15e1401eebb 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -49,7 +49,7 @@ * http://dangan.g.dgdg.jp/en/Content/Program/Java/jLHA/Notes/Notes.html * * @NotThreadSafe - * @since 1.29 + * @since 1.29.0 */ public class LhaArchiveInputStream extends ArchiveInputStream { // Fields that are the same across all header levels diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java index 9cf6154adcc..8ba3edb3fcd 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java @@ -25,7 +25,7 @@ /** * Decompressor for lh4. It has a dictionary size of 4096 bytes. * - * @since 1.29 + * @since 1.29.0 */ public class Lh4CompressorInputStream extends AbstractLhStaticHuffmanCompressorInputStream { public Lh4CompressorInputStream(final InputStream in) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java index 9cdccdc2fa0..0d518f8031f 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java @@ -25,7 +25,7 @@ /** * Decompressor for lh5. It has a dictionary size of 8192 bytes. 
* - * @since 1.29 + * @since 1.29.0 */ public class Lh5CompressorInputStream extends AbstractLhStaticHuffmanCompressorInputStream { public Lh5CompressorInputStream(final InputStream in) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java index 46deb8ecc4a..d5ac0b7d655 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java @@ -25,7 +25,7 @@ /** * Decompressor for lh6. It has a dictionary size of 32768 bytes. * - * @since 1.29 + * @since 1.29.0 */ public class Lh6CompressorInputStream extends AbstractLhStaticHuffmanCompressorInputStream { public Lh6CompressorInputStream(final InputStream in) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java index 7e40b661fd6..77b1b81a8ee 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java @@ -25,7 +25,7 @@ /** * Decompressor for lh7. It has a dictionary size of 65536 bytes. 
* - * @since 1.29 + * @since 1.29.0 */ public class Lh7CompressorInputStream extends AbstractLhStaticHuffmanCompressorInputStream { public Lh7CompressorInputStream(final InputStream in) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java b/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java index e47834252ac..eb45af13601 100644 --- a/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java +++ b/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java @@ -22,7 +22,7 @@ /** * Circular byte buffer. * - * @since 1.29 + * @since 1.29.0 */ public class CircularBuffer { From f61cd80d4ae38602d494541f7ba27954e470f0f3 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 23 Aug 2025 15:17:04 +0200 Subject: [PATCH 07/22] Use varargs for BinaryTree constructor --- .../AbstractLhStaticHuffmanCompressorInputStream.java | 6 +++--- .../commons/compress/compressors/lha/BinaryTree.java | 2 +- .../commons/compress/compressors/lha/BinaryTreeTest.java | 9 ++------- 3 files changed, 6 insertions(+), 11 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java index 3ff5d7e2dfd..9e72ccd6a95 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java @@ -211,7 +211,7 @@ protected BinaryTree readCommandDecodingTree() throws IOException { throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, MAX_NUMBER_OF_COMMAND_DECODING_CODE_LENGTHS); } else if (numCodeLengths == 0) { // If numCodeLengths is zero, we read a single code length of COMMAND_DECODING_LENGTH_BITS bits and use as root of the tree - return new 
BinaryTree(new int[] { readBits(COMMAND_DECODING_LENGTH_BITS) }); + return new BinaryTree(readBits(COMMAND_DECODING_LENGTH_BITS)); } else { // Read all code lengths final int[] codeLengths = new int[numCodeLengths]; @@ -269,7 +269,7 @@ protected BinaryTree readCommandTree(final BinaryTree commandDecodingTree) throw throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, getMaxNumberOfCommands()); } else if (numCodeLengths == 0) { // If numCodeLengths is zero, we read a single code length of COMMAND_TREE_LENGTH_BITS bits and use as root of the tree - return new BinaryTree(new int[] { readBits(COMMAND_TREE_LENGTH_BITS) }); + return new BinaryTree(readBits(COMMAND_TREE_LENGTH_BITS)); } else { // Read all code lengths final int[] codeLengths = new int[numCodeLengths]; @@ -310,7 +310,7 @@ private BinaryTree readDistanceTree() throws IOException { throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, getDistanceCodeSize()); } else if (numCodeLengths == 0) { // If numCodeLengths is zero, we read a single code length of getDistanceBits() bits and use as root of the tree - return new BinaryTree(new int[] { readBits(getDistanceBits()) }); + return new BinaryTree(readBits(getDistanceBits())); } else { // Read all code lengths final int[] codeLengths = new int[numCodeLengths]; diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java b/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java index 615d9c0616e..680e198dd9a 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/BinaryTree.java @@ -55,7 +55,7 @@ class BinaryTree { * * @param array the array to build the binary tree from */ - BinaryTree(final int[] array) { + BinaryTree(final int... 
array) { if (array.length == 1) { // Tree only contains a single value, which is the root node value this.tree = new int[] { array[0] }; diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java index 7bc4083eca1..69d120b55a0 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java @@ -33,11 +33,8 @@ class BinaryTreeTest { @Test void testTree1() throws Exception { - final int[] length = new int[] { 4 }; - // Value: 0 - // Special case where the single array value is the root node value - final BinaryTree tree = new BinaryTree(length); + final BinaryTree tree = new BinaryTree(4); assertEquals(4, tree.read(createBitInputStream())); // Nothing to read, just return the root value } @@ -224,9 +221,7 @@ void testInvalidBitstream() throws Exception { @Test void testCheckMaxDepth() throws Exception { try { - final int[] length = new int[] { 1, 17 }; - - new BinaryTree(length); + new BinaryTree(1, 17); fail("Expected IllegalArgumentException for depth > 16"); } catch (IllegalArgumentException e) { assertEquals("Depth must not be negative and not bigger than 16 but is 17", e.getMessage()); From 1dea96285d03d780fa51a3c023a21048f152a5e8 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 23 Aug 2025 15:26:49 +0200 Subject: [PATCH 08/22] CircularBuffer should be final --- .../org/apache/commons/compress/utils/CircularBuffer.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java b/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java index eb45af13601..0487df274e4 100644 --- a/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java +++ b/src/main/java/org/apache/commons/compress/utils/CircularBuffer.java @@ -24,7 +24,7 @@ * * 
@since 1.29.0 */ -public class CircularBuffer { +public final class CircularBuffer { /** Size of the buffer */ private final int size; @@ -64,12 +64,9 @@ public boolean available() { public void copy(final int distance, final int length) { if (distance < 1) { throw new IllegalArgumentException("Distance must be at least 1"); - } - - if (distance > size) { + } else if (distance > size) { throw new IllegalArgumentException("Distance exceeds buffer size"); } - final int pos1 = writeIndex - distance; final int pos2 = pos1 + length; for (int i = pos1; i < pos2; i++) { @@ -101,7 +98,6 @@ public void put(final int value) { if (bytesAvailable == size) { throw new IllegalStateException("Buffer overflow: Cannot write to a full buffer"); } - buffer[writeIndex] = (byte) value; writeIndex = (writeIndex + 1) % size; bytesAvailable++; From b48bfd96073747c2d536f9c4fb980c721a4bfaa7 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 23 Aug 2025 16:10:37 +0200 Subject: [PATCH 09/22] Use builder pattern for LhaArchiveEntry and make all fields read only --- .../archivers/lha/LhaArchiveEntry.java | 241 ++++++++++++------ .../archivers/lha/LhaArchiveInputStream.java | 117 ++++----- .../archivers/lha/LhaArchiveEntryTest.java | 47 ++-- .../lha/LhaArchiveInputStreamTest.java | 80 +++--- 4 files changed, 288 insertions(+), 197 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java index 9aa7b977020..5f481013087 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java @@ -30,21 +30,37 @@ * @since 1.29.0 */ public class LhaArchiveEntry implements ArchiveEntry { - private String name; - private boolean directory; - private long size; - private Date lastModifiedDate; - private long compressedSize; - private String compressionMethod; - private int 
crcValue; - private Integer osId; - private Integer unixPermissionMode; - private Integer unixUserId; - private Integer unixGroupId; - private Integer msdosFileAttributes; - private Integer headerCrc; - - public LhaArchiveEntry() { + private final String name; + private final boolean directory; + private final long size; + private final Date lastModifiedDate; + private final long compressedSize; + private final String compressionMethod; + private final int crcValue; + private final Integer osId; + private final Integer unixPermissionMode; + private final Integer unixUserId; + private final Integer unixGroupId; + private final Integer msdosFileAttributes; + private final Integer headerCrc; + + LhaArchiveEntry(String name, boolean directory, long size, Date lastModifiedDate, + long compressedSize, String compressionMethod, int crcValue, Integer osId, + Integer unixPermissionMode, Integer unixUserId, Integer unixGroupId, + Integer msdosFileAttributes, Integer headerCrc) { + this.name = name; + this.directory = directory; + this.size = size; + this.lastModifiedDate = lastModifiedDate; + this.compressedSize = compressedSize; + this.compressionMethod = compressionMethod; + this.crcValue = crcValue; + this.osId = osId; + this.unixPermissionMode = unixPermissionMode; + this.unixUserId = unixUserId; + this.unixGroupId = unixGroupId; + this.msdosFileAttributes = msdosFileAttributes; + this.headerCrc = headerCrc; } @Override @@ -77,35 +93,27 @@ public String toString() { return sb.append("]").toString(); } + static Builder builder() { + return new Builder(); + } + @Override public String getName() { return name; } - public void setName(String name) { - this.name = name; - } - @Override public long getSize() { return size; } - public void setSize(long size) { - this.size = size; - } - @Override public Date getLastModifiedDate() { return lastModifiedDate; } - public void setLastModifiedDate(Date lastModifiedDate) { - this.lastModifiedDate = lastModifiedDate; - } - /** - * 
Returns the compressed size of this entry. + * Gets the compressed size of this entry. * * @return the compressed size */ @@ -113,21 +121,13 @@ public long getCompressedSize() { return compressedSize; } - public void setCompressedSize(long compressedSize) { - this.compressedSize = compressedSize; - } - - public void setDirectory(boolean directory) { - this.directory = directory; - } - @Override public boolean isDirectory() { return directory; } /** - * Returns the compression method of this entry. + * Gets the compression method of this entry. * * @return the compression method */ @@ -135,12 +135,8 @@ public String getCompressionMethod() { return compressionMethod; } - public void setCompressionMethod(String compressionMethod) { - this.compressionMethod = compressionMethod; - } - /** - * Returns the CRC-16 checksum of the uncompressed data of this entry. + * Gets the CRC-16 checksum of the uncompressed data of this entry. * * @return CRC-16 checksum of the uncompressed data */ @@ -148,12 +144,8 @@ public int getCrcValue() { return crcValue; } - public void setCrcValue(int crc) { - this.crcValue = crc; - } - /** - * Returns the operating system id if available for this entry. + * Gets the operating system id if available for this entry. * * @return operating system id if available */ @@ -161,36 +153,35 @@ public Integer getOsId() { return osId; } - public void setOsId(Integer osId) { - this.osId = osId; - } - + /** + * Gets the Unix permission mode if available for this entry. + * + * @return Unix permission mode or null if not available + */ public Integer getUnixPermissionMode() { return unixPermissionMode; } - public void setUnixPermissionMode(Integer unixPermissionMode) { - this.unixPermissionMode = unixPermissionMode; - } - + /** + * Gets the Unix user id if available for this entry. 
+ * + * @return Unix user id or null if not available + */ public Integer getUnixUserId() { return unixUserId; } - public void setUnixUserId(Integer unixUserId) { - this.unixUserId = unixUserId; - } - + /** + * Gets the Unix group id if available for this entry. + * + * @return Unix group id or null if not available + */ public Integer getUnixGroupId() { return unixGroupId; } - public void setUnixGroupId(Integer unixGroupId) { - this.unixGroupId = unixGroupId; - } - /** - * Returns the MS-DOS file attributes if available for this entry. + * Gets the MS-DOS file attributes if available for this entry. * * @return MS-DOS file attributes if available */ @@ -198,18 +189,126 @@ public Integer getMsdosFileAttributes() { return msdosFileAttributes; } - public void setMsdosFileAttributes(Integer msdosFileAttributes) { - this.msdosFileAttributes = msdosFileAttributes; - } - /** - * Don't expose the header CRC publicly, as it is of no interest to most users. + * Gets the header CRC if available for this entry. + * + * This method is package private, as it is of no interest to most users. + * + * @return header CRC or null if not available */ Integer getHeaderCrc() { return headerCrc; } - void setHeaderCrc(Integer headerCrc) { - this.headerCrc = headerCrc; + static class Builder { + private String filename; + private String directoryName; + private boolean directory; + private long size; + private Date lastModifiedDate; + private long compressedSize; + private String compressionMethod; + private int crcValue; + private Integer osId; + private Integer unixPermissionMode; + private Integer unixUserId; + private Integer unixGroupId; + private Integer msdosFileAttributes; + private Integer headerCrc; + + Builder() { + } + + LhaArchiveEntry get() { + final String name = new StringBuilder() + .append(directoryName == null ? "" : directoryName) + .append(filename == null ? 
"" : filename) + .toString(); + + return new LhaArchiveEntry( + name, + directory, + size, + lastModifiedDate, + compressedSize, + compressionMethod, + crcValue, + osId, + unixPermissionMode, + unixUserId, + unixGroupId, + msdosFileAttributes, + headerCrc); + } + + Builder setFilename(String filenName) { + this.filename = filenName; + return this; + } + + Builder setDirectoryName(String directoryName) { + this.directoryName = directoryName; + return this; + } + + Builder setDirectory(boolean directory) { + this.directory = directory; + return this; + } + + Builder setSize(long size) { + this.size = size; + return this; + } + + Builder setLastModifiedDate(Date lastModifiedDate) { + this.lastModifiedDate = lastModifiedDate; + return this; + } + + Builder setCompressedSize(long compressedSize) { + this.compressedSize = compressedSize; + return this; + } + + Builder setCompressionMethod(String compressionMethod) { + this.compressionMethod = compressionMethod; + return this; + } + + Builder setCrcValue(int crcValue) { + this.crcValue = crcValue; + return this; + } + + Builder setOsId(Integer osId) { + this.osId = osId; + return this; + } + + Builder setUnixPermissionMode(Integer unixPermissionMode) { + this.unixPermissionMode = unixPermissionMode; + return this; + } + + Builder setUnixUserId(Integer unixUserId) { + this.unixUserId = unixUserId; + return this; + } + + Builder setUnixGroupId(Integer unixGroupId) { + this.unixGroupId = unixGroupId; + return this; + } + + Builder setMsdosFileAttributes(Integer msdosFileAttributes) { + this.msdosFileAttributes = msdosFileAttributes; + return this; + } + + Builder setHeaderCrc(Integer headerCrc) { + this.headerCrc = headerCrc; + return this; + } } } diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index 15e1401eebb..72c3ae42b90 100644 --- 
a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -258,24 +258,26 @@ protected LhaArchiveEntry readHeaderLevel0(ByteBuffer buffer) throws IOException final int headerChecksum = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_HEADER_CHECKSUM)); - final LhaArchiveEntry entry = new LhaArchiveEntry(); - entry.setCompressionMethod(getCompressionMethod(buffer)); - entry.setCompressedSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_COMPRESSED_SIZE))); - entry.setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_ORIGINAL_SIZE))); - entry.setLastModifiedDate(new Date(ZipUtil.dosToJavaTime(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_LAST_MODIFIED_DATE_TIME))))); + final String compressionMethod = getCompressionMethod(buffer); + + final LhaArchiveEntry.Builder entryBuilder = new LhaArchiveEntry.Builder() + .setCompressionMethod(compressionMethod) + .setCompressedSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_COMPRESSED_SIZE))) + .setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_ORIGINAL_SIZE))) + .setLastModifiedDate(new Date(ZipUtil.dosToJavaTime(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_0_OFFSET_LAST_MODIFIED_DATE_TIME))))); final int filenameLength = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_FILENAME_LENGTH)); buffer.position(HEADER_LEVEL_0_OFFSET_FILENAME); - entry.setName(getPathname(buffer, filenameLength)); - - entry.setDirectory(isDirectory(entry.getCompressionMethod())); - - entry.setCrcValue(Short.toUnsignedInt(buffer.getShort())); + entryBuilder.setFilename(getPathname(buffer, filenameLength)) + .setDirectory(isDirectory(compressionMethod)) + .setCrcValue(Short.toUnsignedInt(buffer.getShort())); if (calculateHeaderChecksum(buffer) != headerChecksum) { throw new ArchiveException("Invalid header level 0 checksum"); } + final LhaArchiveEntry 
entry = entryBuilder.get(); + prepareDecompression(entry); return entry; @@ -298,20 +300,20 @@ protected LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException final int baseHeaderChecksum = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_BASE_HEADER_CHECKSUM)); - final LhaArchiveEntry entry = new LhaArchiveEntry(); - entry.setCompressionMethod(getCompressionMethod(buffer)); + final String compressionMethod = getCompressionMethod(buffer); long skipSize = Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_SKIP_SIZE)); - entry.setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_ORIGINAL_SIZE))); - entry.setLastModifiedDate(new Date(ZipUtil.dosToJavaTime(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_LAST_MODIFIED_DATE_TIME))))); + + final LhaArchiveEntry.Builder entryBuilder = new LhaArchiveEntry.Builder() + .setCompressionMethod(compressionMethod) + .setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_ORIGINAL_SIZE))) + .setLastModifiedDate(new Date(ZipUtil.dosToJavaTime(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_1_OFFSET_LAST_MODIFIED_DATE_TIME))))); final int filenameLength = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_FILENAME_LENGTH)); buffer.position(HEADER_LEVEL_1_OFFSET_FILENAME); - entry.setName(getPathname(buffer, filenameLength)); - - entry.setDirectory(isDirectory(entry.getCompressionMethod())); - - entry.setCrcValue(Short.toUnsignedInt(buffer.getShort())); - entry.setOsId(Byte.toUnsignedInt(buffer.get())); + entryBuilder.setFilename(getPathname(buffer, filenameLength)) + .setDirectory(isDirectory(compressionMethod)) + .setCrcValue(Short.toUnsignedInt(buffer.getShort())) + .setOsId(Byte.toUnsignedInt(buffer.get())); if (calculateHeaderChecksum(buffer) != baseHeaderChecksum) { throw new ArchiveException("Invalid header level 1 checksum"); @@ -327,14 +329,16 @@ protected LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException final ByteBuffer 
extendedHeaderBuffer = readExtendedHeader(nextHeaderSize); skipSize -= nextHeaderSize; - parseExtendedHeader(extendedHeaderBuffer, entry); + parseExtendedHeader(extendedHeaderBuffer, entryBuilder); headerParts.add(extendedHeaderBuffer); nextHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); } - entry.setCompressedSize(skipSize); + entryBuilder.setCompressedSize(skipSize); + + final LhaArchiveEntry entry = entryBuilder.get(); if (entry.getHeaderCrc() != null) { // Calculate CRC16 of full header @@ -364,15 +368,16 @@ protected LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException buffer = readRemainingHeaderData(buffer, headerSize); - final LhaArchiveEntry entry = new LhaArchiveEntry(); - entry.setCompressionMethod(getCompressionMethod(buffer)); - entry.setCompressedSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_COMPRESSED_SIZE))); - entry.setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_ORIGINAL_SIZE))); - entry.setLastModifiedDate(new Date(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_LAST_MODIFIED_DATE_TIME)) * 1000)); - entry.setName(""); - entry.setDirectory(isDirectory(entry.getCompressionMethod())); - entry.setCrcValue(Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_CRC))); - entry.setOsId(Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_2_OFFSET_OS_ID))); + final String compressionMethod = getCompressionMethod(buffer); + + final LhaArchiveEntry.Builder entryBuilder = new LhaArchiveEntry.Builder() + .setCompressionMethod(compressionMethod) + .setCompressedSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_COMPRESSED_SIZE))) + .setSize(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_ORIGINAL_SIZE))) + .setLastModifiedDate(new Date(Integer.toUnsignedLong(buffer.getInt(HEADER_LEVEL_2_OFFSET_LAST_MODIFIED_DATE_TIME)) * 1000)) + .setDirectory(isDirectory(compressionMethod)) + 
.setCrcValue(Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_CRC))) + .setOsId(Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_2_OFFSET_OS_ID))); int extendedHeaderOffset = HEADER_LEVEL_2_OFFSET_FIRST_EXTENDED_HEADER_SIZE; int nextHeaderSize = Short.toUnsignedInt(buffer.getShort(extendedHeaderOffset)); @@ -382,11 +387,13 @@ protected LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException extendedHeaderOffset += nextHeaderSize; - parseExtendedHeader(extendedHeaderBuffer, entry); + parseExtendedHeader(extendedHeaderBuffer, entryBuilder); nextHeaderSize = Short.toUnsignedInt(extendedHeaderBuffer.getShort(extendedHeaderBuffer.limit() - 2)); } + final LhaArchiveEntry entry = entryBuilder.get(); + if (entry.getHeaderCrc() != null) { // Calculate CRC16 of full header final long headerCrc = calculateCRC16(buffer); @@ -515,17 +522,17 @@ private ByteBuffer readExtendedHeader(final int headerSize) throws IOException { * Parse the extended header and set the values in the provided entry. 
* * @param extendedHeaderBuffer the buffer containing the extended header - * @param entry the entry to set the values in + * @param entryBuilder the entry builder to set the values in * @throws IOException */ - protected void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final LhaArchiveEntry entry) throws IOException { + protected void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final LhaArchiveEntry.Builder entryBuilder) throws IOException { final int extendedHeaderType = Byte.toUnsignedInt(extendedHeaderBuffer.get()); if (extendedHeaderType == EXTENDED_HEADER_TYPE_COMMON) { // Common header final int crcPos = extendedHeaderBuffer.position(); // Save the current position to be able to set the header CRC later // Header CRC - entry.setHeaderCrc(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); + entryBuilder.setHeaderCrc(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); // Set header CRC to zero to be able to later compute the CRC of the full header extendedHeaderBuffer.putShort(crcPos, (short) 0); @@ -533,49 +540,31 @@ protected void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final // File name header final int filenameLength = extendedHeaderBuffer.limit() - extendedHeaderBuffer.position() - 2; final String filename = getPathname(extendedHeaderBuffer, filenameLength); - if (entry.getName() == null || "".equals(entry.getName())) { - entry.setName(filename); - } else { - final StringBuilder entryNameBuilder = new StringBuilder(entry.getName()); - if (entryNameBuilder.charAt(entryNameBuilder.length() - 1) != fileSeparatorChar) { - // If the entry name does not end with a file separator, append it - entryNameBuilder.append(fileSeparatorChar); - } - - entryNameBuilder.append(filename); - - entry.setName(entryNameBuilder.toString()); - } + entryBuilder.setFilename(filename); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_DIRECTORY_NAME) { // Directory name header final int directoryNameLength = 
extendedHeaderBuffer.limit() - extendedHeaderBuffer.position() - 2; final String directoryName = getPathname(extendedHeaderBuffer, directoryNameLength); - if (entry.getName() == null || "".equals(entry.getName())) { - entry.setName(directoryName); + if (directoryName.charAt(directoryName.length() - 1) != fileSeparatorChar) { + // If the directory name does not end with a file separator, append it + entryBuilder.setDirectoryName(directoryName + fileSeparatorChar); } else { - final StringBuilder entryNameBuilder = new StringBuilder(directoryName); - if (entryNameBuilder.charAt(entryNameBuilder.length() - 1) != fileSeparatorChar) { - // If the directory name does not end with a file separator, append it - entryNameBuilder.append(fileSeparatorChar); - } - - entryNameBuilder.append(entry.getName()); - - entry.setName(entryNameBuilder.toString()); + entryBuilder.setDirectoryName(directoryName); } + } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_MSDOS_FILE_ATTRIBUTES) { // MS-DOS file attributes - entry.setMsdosFileAttributes(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); + entryBuilder.setMsdosFileAttributes(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_PERMISSION) { // UNIX file permission - entry.setUnixPermissionMode(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); + entryBuilder.setUnixPermissionMode(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_UID_GID) { // UNIX group/user ID - entry.setUnixGroupId(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); - entry.setUnixUserId(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); + entryBuilder.setUnixGroupId(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); + entryBuilder.setUnixUserId(Short.toUnsignedInt(extendedHeaderBuffer.getShort())); } else if (extendedHeaderType == EXTENDED_HEADER_TYPE_UNIX_TIMESTAMP) { // UNIX last modified time - 
entry.setLastModifiedDate(new Date(Integer.toUnsignedLong(extendedHeaderBuffer.getInt()) * 1000)); + entryBuilder.setLastModifiedDate(new Date(Integer.toUnsignedLong(extendedHeaderBuffer.getInt()) * 1000)); } // Ignore unknown extended header diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java index 428cd165278..4242b0cc6ec 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntryTest.java @@ -28,14 +28,15 @@ class LhaArchiveEntryTest { @Test void testToStringMinimal() { - final LhaArchiveEntry entry = new LhaArchiveEntry(); - entry.setName("test1.txt"); - entry.setDirectory(false); - entry.setSize(57); - entry.setLastModifiedDate(new Date(1754236942000L)); // 2025-08-03T16:02:22Z - entry.setCompressedSize(52); - entry.setCompressionMethod("-lh5-"); - entry.setCrcValue(0x6496); + final LhaArchiveEntry entry = LhaArchiveEntry.builder() + .setFilename("test1.txt") + .setDirectory(false) + .setSize(57) + .setLastModifiedDate(new Date(1754236942000L)) // 2025-08-03T16:02:22Z + .setCompressedSize(52) + .setCompressionMethod("-lh5-") + .setCrcValue(0x6496) + .get(); assertEquals("LhaArchiveEntry[name=test1.txt,directory=false,size=57,lastModifiedDate=2025-08-03T16:02:22Z,compressedSize=52," + "compressionMethod=-lh5-,crcValue=0x6496]", entry.toString()); @@ -43,20 +44,22 @@ void testToStringMinimal() { @Test void testToStringAllFields() { - final LhaArchiveEntry entry = new LhaArchiveEntry(); - entry.setName("dir1/test1.txt"); - entry.setDirectory(false); - entry.setSize(57); - entry.setLastModifiedDate(new Date(1754236942000L)); // 2025-08-03T16:02:22Z - entry.setCompressedSize(52); - entry.setCompressionMethod("-lh5-"); - entry.setCrcValue(0x6496); - entry.setOsId(85); - entry.setUnixPermissionMode(0100644); - 
entry.setUnixGroupId(20); - entry.setUnixUserId(501); - entry.setMsdosFileAttributes(0x0010); - entry.setHeaderCrc(0xb772); + final LhaArchiveEntry entry = LhaArchiveEntry.builder() + .setFilename("test1.txt") + .setDirectoryName("dir1/") + .setDirectory(false) + .setSize(57) + .setLastModifiedDate(new Date(1754236942000L)) // 2025-08-03T16:02:22Z + .setCompressedSize(52) + .setCompressionMethod("-lh5-") + .setCrcValue(0x6496) + .setOsId(85) + .setUnixPermissionMode(0100644) + .setUnixGroupId(20) + .setUnixUserId(501) + .setMsdosFileAttributes(0x0010) + .setHeaderCrc(0xb772) + .get(); assertEquals( "LhaArchiveEntry[name=dir1/test1.txt,directory=false,size=57,lastModifiedDate=2025-08-03T16:02:22Z,compressedSize=52," + diff --git a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java index ed0ab0bb23b..04f8f14429a 100644 --- a/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStreamTest.java @@ -1116,96 +1116,96 @@ void testParseHeaderLevel2FileWithMsdosAttributes() throws IOException { @Test void testParseExtendedHeaderCommon() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { - final LhaArchiveEntry entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x00, 0x22, 0x33, 0x00, 0x00), entry); - assertEquals(0x3322, entry.getHeaderCrc()); + final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x00, 0x22, 0x33, 0x00, 0x00), entryBuilder); + assertEquals(0x3322, entryBuilder.get().getHeaderCrc()); } } @Test void testParseExtendedHeaderFilename() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { - final 
LhaArchiveEntry entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); - assertEquals("test.txt", entry.getName()); + final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); + assertEquals("test.txt", entryBuilder.get().getName()); } } @Test void testParseExtendedHeaderDirectoryName() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '/')) { - final LhaArchiveEntry entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entry); - assertEquals("dir1/", entry.getName()); + final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entryBuilder); + assertEquals("dir1/", entryBuilder.get().getName()); } } @Test void testParseExtendedHeaderFilenameAndDirectoryName() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]), null, '/')) { - LhaArchiveEntry entry; + LhaArchiveEntry.Builder entryBuilder; // Test filename and directory name order - entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); - archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entry); - assertEquals("dir1/test.txt", entry.getName()); + entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entryBuilder); + assertEquals("dir1/test.txt", entryBuilder.get().getName()); // Test 
filename and directory name order, no trailing slash - entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); - archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0x00, 0x00), entry); - assertEquals("dir1/test.txt", entry.getName()); + entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0x00, 0x00), entryBuilder); + assertEquals("dir1/test.txt", entryBuilder.get().getName()); // Test directory name and filename order - entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entry); - archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); - assertEquals("dir1/test.txt", entry.getName()); + entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0xff, 0x00, 0x00), entryBuilder); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); + assertEquals("dir1/test.txt", entryBuilder.get().getName()); // Test directory name and filename order, no trailing slash - entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0x00, 0x00), entry); - archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entry); - assertEquals("dir1/test.txt", entry.getName()); + entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x02, 'd', 'i', 'r', '1', 0x00, 0x00), entryBuilder); + archive.parseExtendedHeader(toByteBuffer(0x01, 't', 'e', 's', 't', '.', 't', 'x', 't', 0x00, 0x00), entryBuilder); + assertEquals("dir1/test.txt", entryBuilder.get().getName()); } } @Test void 
testParseExtendedHeaderUnixPermission() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { - final LhaArchiveEntry entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x50, 0xa4, 0x81, 0x00, 0x00), entry); - assertEquals(0x81a4, entry.getUnixPermissionMode()); - assertEquals(0100644, entry.getUnixPermissionMode()); + final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x50, 0xa4, 0x81, 0x00, 0x00), entryBuilder); + assertEquals(0x81a4, entryBuilder.get().getUnixPermissionMode()); + assertEquals(0100644, entryBuilder.get().getUnixPermissionMode()); } } @Test void testParseExtendedHeaderUnixUidGid() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { - final LhaArchiveEntry entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x51, 0x14, 0x00, 0xf5, 0x01, 0x00, 0x00), entry); - assertEquals(0x0014, entry.getUnixGroupId()); - assertEquals(0x01f5, entry.getUnixUserId()); + final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x51, 0x14, 0x00, 0xf5, 0x01, 0x00, 0x00), entryBuilder); + assertEquals(0x0014, entryBuilder.get().getUnixGroupId()); + assertEquals(0x01f5, entryBuilder.get().getUnixUserId()); } } @Test void testParseExtendedHeaderUnixTimestamp() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { - final LhaArchiveEntry entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x54, 0x5c, 0x73, 0x9c, 0x68, 0x00, 0x00), entry); - assertEquals(0x689c735cL, entry.getLastModifiedDate().getTime() / 1000); + final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x54, 0x5c, 0x73, 0x9c, 0x68, 0x00, 0x00), 
entryBuilder); + assertEquals(0x689c735cL, entryBuilder.get().getLastModifiedDate().getTime() / 1000); } } @Test void testParseExtendedHeaderMSdosFileAttributes() throws IOException { try (LhaArchiveInputStream archive = new LhaArchiveInputStream(new ByteArrayInputStream(new byte[0]))) { - final LhaArchiveEntry entry = new LhaArchiveEntry(); - archive.parseExtendedHeader(toByteBuffer(0x40, 0x10, 0x00, 0x00), entry); - assertEquals(0x10, entry.getMsdosFileAttributes()); + final LhaArchiveEntry.Builder entryBuilder = LhaArchiveEntry.builder(); + archive.parseExtendedHeader(toByteBuffer(0x40, 0x10, 0x00, 0x00), entryBuilder); + assertEquals(0x10, entryBuilder.get().getMsdosFileAttributes()); } } From dedf17d4a6e2379a96b3ca1eb996f1d73c770d00 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 23 Aug 2025 16:24:14 +0200 Subject: [PATCH 10/22] Update javadocs --- .../archivers/lha/LhaArchiveEntry.java | 4 +- .../archivers/lha/LhaArchiveInputStream.java | 6 +- .../compress/archivers/lha/package-info.java | 3 +- ...tLhStaticHuffmanCompressorInputStream.java | 67 ++++++++++++------- .../lha/Lh4CompressorInputStream.java | 2 +- .../compressors/lha/package-info.java | 2 +- .../lha/Lh4CompressorInputStreamTest.java | 2 +- .../lha/Lh5CompressorInputStreamTest.java | 2 +- .../lha/Lh6CompressorInputStreamTest.java | 2 +- .../lha/Lh7CompressorInputStreamTest.java | 2 +- 10 files changed, 56 insertions(+), 36 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java index 5f481013087..6aea79e31ac 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveEntry.java @@ -147,7 +147,7 @@ public int getCrcValue() { /** * Gets the operating system id if available for this entry. 
* - * @return operating system id if available + * @return operating system id or null if not available */ public Integer getOsId() { return osId; @@ -183,7 +183,7 @@ public Integer getUnixGroupId() { /** * Gets the MS-DOS file attributes if available for this entry. * - * @return MS-DOS file attributes if available + * @return MS-DOS file attributes or null if not available */ public Integer getMsdosFileAttributes() { return msdosFileAttributes; diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index 72c3ae42b90..678f619d446 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -408,7 +408,7 @@ protected LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException } /** - * Get the compression method from the header. It is always located at the same offset for all header levels. + * Gets the compression method from the header. It is always located at the same offset for all header levels. * * @param buffer the buffer containing the header data * @return compression method, e.g. -lh5- @@ -436,7 +436,7 @@ protected static String getCompressionMethod(final ByteBuffer buffer) throws Arc } /** - * Get the pathname from the current position in the provided buffer. Any 0xFF bytes + * Gets the pathname from the current position in the provided buffer. Any 0xFF bytes * and '\' chars will be converted into the configured file path separator char. * Any leading file path separator char will be removed to avoid extracting to * absolute locations. @@ -571,7 +571,7 @@ protected void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final } /** - * Check if the compression method is a directory entry. + * Tests whether the compression method is a directory entry. 
* * @param compressionMethod the compression method * @return true if the compression method is a directory entry, false otherwise diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/package-info.java b/src/main/java/org/apache/commons/compress/archivers/lha/package-info.java index 370527b3366..6ce4194a9fd 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/package-info.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/package-info.java @@ -18,6 +18,7 @@ */ /** - * Provides stream classes for reading archives using the LHA format, also known as the LZH format or LHarc format. + * Provides stream classes for reading archives using the LHA format, + * also known as the LZH format or LHarc format. */ package org.apache.commons.compress.archivers.lha; diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java index 9e72ccd6a95..e16ca9bccb3 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java @@ -33,27 +33,41 @@ /** * This is an implementation of a static Huffman compressor input stream for LHA files that * supports lh4, lh5, lh6 and lh7 compression methods. 
- * - * This implementation is based on the documentation that can be found at - * https://github.com/jca02266/lha/blob/master/Hacking_of_LHa */ abstract class AbstractLhStaticHuffmanCompressorInputStream extends CompressorInputStream implements InputStreamStatistics { - // Constants for command tree decoding - private static final int COMMAND_DECODING_LENGTH_BITS = 5; // Number of bits used to encode the command decoding tree length - private static final int MAX_NUMBER_OF_COMMAND_DECODING_CODE_LENGTHS = 19; // Maximum number of codes in the command decoding tree - - // Constants for command tree - private static final int COMMAND_TREE_LENGTH_BITS = 9; // Number of bits used to encode the command tree length - - // Constants for code length - private static final int CODE_LENGTH_BITS = 3; // Number of bits used to encode the code length + /** + * Number of bits used to encode the command decoding tree length. + */ + private static final int COMMAND_DECODING_LENGTH_BITS = 5; + /** + * Maximum number of codes in the command decoding tree. + */ + private static final int MAX_NUMBER_OF_COMMAND_DECODING_CODE_LENGTHS = 19; + /** + * Number of bits used to encode the command tree length. + */ + private static final int COMMAND_TREE_LENGTH_BITS = 9; + /** + * Number of literal codes (0-255). + */ + private static final int NUMBER_OF_LITERAL_CODES = 0x100; + /** + * Number of bits used to encode the code length. + */ + private static final int CODE_LENGTH_BITS = 3; private static final int MAX_CODE_LENGTH = 16; private BitInputStream bin; private CircularBuffer buffer; private int blockSize; - private BinaryTree commandTree; // Command is either a literal or a copy command - private BinaryTree distanceTree; // Distance is the offset to copy from the sliding dictionary + /** + * Command is either a literal or a copy command. + */ + private BinaryTree commandTree; + /** + * Distance is the offset to copy from the sliding dictionary. 
+ */ + private BinaryTree distanceTree; /** * Constructs a new CompressorInputStream which decompresses bytes read from the specified stream. @@ -82,7 +96,7 @@ public void close() throws IOException { } /** - * Get the threshold for copying data from the sliding dictionary. This is the minimum + * Gets the threshold for copying data from the sliding dictionary. This is the minimum * possible number of bytes that will be part of a copy command. * * @return the copy threshold @@ -92,14 +106,14 @@ protected int getCopyThreshold() { } /** - * Get the number of bits used for the dictionary size. + * Gets the number of bits used for the dictionary size. * * @return the number of bits used for the dictionary size */ protected abstract int getDictionaryBits(); /** - * Get the size of the dictionary. + * Gets the size of the dictionary. * * @return the size of the dictionary */ @@ -108,18 +122,23 @@ protected int getDictionarySize() { } /** - * Get the number of bits used for the distance. + * Gets the number of bits used for the distance. * * @return the number of bits used for the distance */ protected abstract int getDistanceBits(); - protected int getDistanceCodeSize() { + /** + * Gets the maximum number of distance codes in the distance tree. + * + * @return the maximum number of distance codes + */ + protected int getMaxNumberOfDistanceCodes() { return getDictionaryBits() + 1; } /** - * Get the maximum match length for the copy command. + * Gets the maximum match length for the copy command. * * @return the maximum match length */ @@ -128,13 +147,13 @@ protected int getMaxMatchLength() { } /** - * Get the maximum number of commands in the command tree. + * Gets the maximum number of commands in the command tree. * This is 256 literals (0-255) and 254 copy lengths combinations (3-256). 
* * @return the maximum number of commands */ protected int getMaxNumberOfCommands() { - return 256 + getMaxMatchLength() - getCopyThreshold() + 1; + return NUMBER_OF_LITERAL_CODES + getMaxMatchLength() - getCopyThreshold() + 1; } @Override @@ -306,8 +325,8 @@ private BinaryTree readDistanceTree() throws IOException { // Number of code lengths to read final int numCodeLengths = readBits(getDistanceBits()); - if (numCodeLengths > getDistanceCodeSize()) { - throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, getDistanceCodeSize()); + if (numCodeLengths > getMaxNumberOfDistanceCodes()) { + throw new CompressorException("Code length table has invalid size (%d > %d)", numCodeLengths, getMaxNumberOfDistanceCodes()); } else if (numCodeLengths == 0) { // If numCodeLengths is zero, we read a single code length of getDistanceBits() bits and use as root of the tree return new BinaryTree(readBits(getDistanceBits())); diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java index 8ba3edb3fcd..44732508201 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java @@ -43,7 +43,7 @@ protected int getDistanceBits() { } @Override - protected int getDistanceCodeSize() { + protected int getMaxNumberOfDistanceCodes() { return getDictionaryBits() + 2; } } diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/package-info.java b/src/main/java/org/apache/commons/compress/compressors/lha/package-info.java index 92d07a68dac..8daf239f972 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/package-info.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/package-info.java @@ -18,6 +18,6 @@ */ /** - * Provides stream classes for 
decompressing streams found in LHA archives. + * Provides stream classes for decompressing streams found in LHA archives. */ package org.apache.commons.compress.compressors.lha; diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java index 3b59834715b..f336a6f2c1a 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStreamTest.java @@ -40,7 +40,7 @@ void testConfiguration() throws IOException { assertEquals(12, in.getDictionaryBits()); assertEquals(4096, in.getDictionarySize()); assertEquals(4, in.getDistanceBits()); - assertEquals(14, in.getDistanceCodeSize()); + assertEquals(14, in.getMaxNumberOfDistanceCodes()); assertEquals(256, in.getMaxMatchLength()); assertEquals(510, in.getMaxNumberOfCommands()); } diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java index fc32ebb6a8e..64413a72b9b 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStreamTest.java @@ -40,7 +40,7 @@ void testConfiguration() throws IOException { assertEquals(8192, in.getDictionarySize()); assertEquals(13, in.getDictionaryBits()); assertEquals(4, in.getDistanceBits()); - assertEquals(14, in.getDistanceCodeSize()); + assertEquals(14, in.getMaxNumberOfDistanceCodes()); assertEquals(256, in.getMaxMatchLength()); assertEquals(510, in.getMaxNumberOfCommands()); } diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java 
b/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java index b7e84463322..e359b30b978 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStreamTest.java @@ -40,7 +40,7 @@ void testConfiguration() throws IOException { assertEquals(15, in.getDictionaryBits()); assertEquals(32768, in.getDictionarySize()); assertEquals(5, in.getDistanceBits()); - assertEquals(16, in.getDistanceCodeSize()); + assertEquals(16, in.getMaxNumberOfDistanceCodes()); assertEquals(256, in.getMaxMatchLength()); assertEquals(510, in.getMaxNumberOfCommands()); } diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java index 4a1825c563e..a54e16a1fc6 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStreamTest.java @@ -40,7 +40,7 @@ void testConfiguration() throws IOException { assertEquals(16, in.getDictionaryBits()); assertEquals(65536, in.getDictionarySize()); assertEquals(5, in.getDistanceBits()); - assertEquals(17, in.getDistanceCodeSize()); + assertEquals(17, in.getMaxNumberOfDistanceCodes()); assertEquals(256, in.getMaxMatchLength()); assertEquals(510, in.getMaxNumberOfCommands()); } From 7152f3e753e8a6b38e262e052da06c81a9abb62c Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 23 Aug 2025 18:12:49 +0200 Subject: [PATCH 11/22] Fix more EOF issues --- ...bstractLhStaticHuffmanCompressorInputStream.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java 
b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java index e16ca9bccb3..ee3c569d79a 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java @@ -202,7 +202,9 @@ private void fillBuffer() throws IOException { this.blockSize--; final int command = commandTree.read(bin); - if (command < 0x100) { + if (command == -1) { + throw new CompressorException("Unexpected end of stream"); + } else if (command < 0x100) { // Literal command, just write the byte to the buffer buffer.put(command); } else { @@ -296,7 +298,9 @@ protected BinaryTree readCommandTree(final BinaryTree commandDecodingTree) throw for (int index = 0; index < numCodeLengths;) { final int codeOrSkipRange = commandDecodingTree.read(bin); - if (codeOrSkipRange == 0) { + if (codeOrSkipRange == -1) { + throw new CompressorException("Unexpected end of stream"); + } else if (codeOrSkipRange == 0) { // Skip one code length index++; } else if (codeOrSkipRange == 1) { @@ -351,8 +355,9 @@ private BinaryTree readDistanceTree() throws IOException { private int readDistance() throws IOException { // Determine the number of bits to read for the distance by reading an entry from the distance tree final int bits = distanceTree.read(bin); - - if (bits == 0 || bits == 1) { + if (bits == -1) { + throw new CompressorException("Unexpected end of stream"); + } else if (bits == 0 || bits == 1) { // This is effectively run length encoding return bits; } else { From 68e8ccea4f52905299e08d135bb8ac61fb55f18e Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sun, 24 Aug 2025 10:39:33 +0200 Subject: [PATCH 12/22] Use constant NUMBER_OF_LITERAL_CODES --- .../lha/AbstractLhStaticHuffmanCompressorInputStream.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java index ee3c569d79a..750733d4f8b 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java @@ -204,13 +204,13 @@ private void fillBuffer() throws IOException { final int command = commandTree.read(bin); if (command == -1) { throw new CompressorException("Unexpected end of stream"); - } else if (command < 0x100) { + } else if (command < NUMBER_OF_LITERAL_CODES) { // Literal command, just write the byte to the buffer buffer.put(command); } else { // Copy command, read the distance and calculate the length from the command final int distance = readDistance(); - final int length = command - 0x100 + getCopyThreshold(); + final int length = command - NUMBER_OF_LITERAL_CODES + getCopyThreshold(); // Copy the data from the sliding dictionary and add to the buffer buffer.copy(distance + 1, length); From 9fb4d028c3339b9fe328c605102ad4159d030490 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sun, 24 Aug 2025 11:31:10 +0200 Subject: [PATCH 13/22] Update test case --- .../compressors/lha/BinaryTreeTest.java | 58 ++++++++++--------- 1 file changed, 32 insertions(+), 26 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java b/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java index 69d120b55a0..81474bfa1b5 100644 --- a/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/lha/BinaryTreeTest.java @@ -152,35 +152,41 @@ void testTree9() throws Exception { @Test void testTree10() throws Exception { - // Maximum length of 510 
entries - final int[] length = new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 9, 0, 0, 0, - 0, 13, 8, 0, 0, 0, 0, 8, 9, 9, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 8, 8, 0, 0, 0, 9, 0, 9, 13, 11, 11, 10, 10, 10, 10, 9, 13, 11, 10, 11, - 10, 10, 10, 13, 11, 10, 9, 13, 13, 10, 0, 13, 0, 10, 0, 13, 0, 0, 0, 7, 8, 8, 7, 6, 8, 8, 8, 7, 10, 9, 7, 7, 7, 7, 8, 11, 7, 6, 7, 7, 9, - 8, 10, 8, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 4, 4, 4, 5, 5, 6, 7, 7, 7, 7, 8, 11, 10, 10, 12, 12, 0, 12, 12, 13, 0, 0, 0, 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }; + // Maximum length of 510 entries for command tree and maximum supported depth of 16 + final int[] length = new int[] { 4, 7, 7, 8, 7, 9, 8, 9, 7, 10, 8, 10, 7, 10, 8, 10, 7, 9, 8, 9, 8, 10, 8, 12, 8, 10, 9, 11, 9, 9, 8, 10, 6, 9, + 7, 9, 8, 10, 8, 11, 7, 9, 8, 9, 8, 9, 8, 9, 7, 9, 8, 8, 8, 10, 9, 11, 8, 9, 8, 10, 8, 9, 8, 9, 7, 7, 7, 8, 8, 8, 8, 9, 7, 8, 7, 
9, 8, 9, + 8, 8, 8, 10, 7, 7, 8, 8, 8, 9, 8, 9, 8, 9, 9, 10, 9, 10, 7, 8, 9, 9, 8, 7, 7, 7, 8, 8, 9, 8, 8, 9, 8, 8, 8, 11, 8, 9, 8, 8, 9, 10, 9, 9, + 8, 10, 8, 10, 9, 9, 7, 9, 9, 10, 9, 10, 9, 9, 9, 10, 9, 11, 10, 11, 9, 10, 8, 10, 9, 11, 9, 10, 10, 12, 9, 11, 9, 12, 10, 14, 10, 14, 10, + 11, 10, 11, 9, 11, 10, 12, 9, 11, 10, 11, 9, 10, 10, 11, 9, 11, 10, 12, 10, 13, 11, 13, 10, 11, 10, 13, 10, 15, 10, 14, 8, 10, 9, 10, 9, + 10, 10, 11, 9, 11, 10, 12, 10, 13, 10, 13, 9, 11, 9, 11, 9, 12, 9, 11, 9, 10, 9, 12, 9, 11, 9, 9, 9, 10, 8, 10, 9, 11, 9, 10, 9, 10, 9, + 10, 9, 10, 9, 11, 8, 10, 9, 10, 9, 10, 9, 11, 9, 10, 8, 10, 8, 10, 9, 7, 3, 4, 5, 5, 6, 7, 7, 7, 8, 8, 9, 9, 9, 9, 10, 10, 11, 11, 11, + 10, 11, 12, 11, 12, 12, 12, 12, 13, 13, 13, 14, 12, 14, 13, 16, 14, 16, 13, 15, 14, 13, 15, 14, 15, 14, 15, 14, 14, 0, 14, 15, 14, 0, + 14, 0, 0, 0, 0, 0, 0, 0, 15, 0, 15, 0, 0, 15, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 0, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 15, 10 }; final BinaryTree tree = new BinaryTree(length); - assertEquals(509, tree.read(createBitInputStream(0x00, 0x00))); // 0xxx xxxx xxxx xxxx - assertEquals(256, tree.read(createBitInputStream(0x80, 0x00))); // 1000 xxxx xxxx xxxx - assertEquals(257, tree.read(createBitInputStream(0x90, 0x00))); // 1001 xxxx xxxx xxxx - assertEquals(258, tree.read(createBitInputStream(0xa0, 0x00))); // 1010 xxxx xxxx xxxx - assertEquals(259, tree.read(createBitInputStream(0xb0, 0x00))); // 1011 0xxx xxxx xxxx - assertEquals(260, 
tree.read(createBitInputStream(0xb8, 0x00))); // 1011 1xxx xxxx xxxx - assertEquals(101, tree.read(createBitInputStream(0xc0, 0x00))); // 1100 00xx xxxx xxxx - assertEquals(115, tree.read(createBitInputStream(0xc4, 0x00))); // 1100 01xx xxxx xxxx - - assertEquals(93, tree.read(createBitInputStream(0xff, 0xe0))); // 1111 1111 1110 0xxx - assertEquals(122, tree.read(createBitInputStream(0xff, 0xe8))); // 1111 1111 1110 1xxx - assertEquals(275, tree.read(createBitInputStream(0xff, 0xf0))); // 1111 1111 1111 0xxx - assertEquals(283, tree.read(createBitInputStream(0xff, 0xf8))); // 1111 1111 1111 1xxx + assertEquals(256, tree.read(createBitInputStream(0x00, 0x00))); // 000x xxxx xxxx xxxx + assertEquals(0, tree.read(createBitInputStream(0x20, 0x00))); // 0010 xxxx xxxx xxxx + assertEquals(257, tree.read(createBitInputStream(0x30, 0x00))); // 0011 xxxx xxxx xxxx + assertEquals(258, tree.read(createBitInputStream(0x40, 0x00))); // 0100 0xxx xxxx xxxx + assertEquals(259, tree.read(createBitInputStream(0x48, 0x00))); // 0100 1xxx xxxx xxxx + assertEquals(32, tree.read(createBitInputStream(0x50, 0x00))); // 0101 00xx xxxx xxxx + assertEquals(260, tree.read(createBitInputStream(0x54, 0x00))); // 0101 01xx xxxx xxxx + + assertEquals(226, tree.read(createBitInputStream(0xbd, 0x00))); // 1011 1101 xxxx xxxx + assertEquals(240, tree.read(createBitInputStream(0xbe, 0x00))); // 1011 1110 xxxx xxxx + + assertEquals(163, tree.read(createBitInputStream(0xfb, 0xa0))); // 1111 1011 101x xxxx + assertEquals(165, tree.read(createBitInputStream(0xfb, 0xc0))); // 1111 1011 110x xxxx + + assertEquals(499, tree.read(createBitInputStream(0xff, 0xfa))); // 1111 1111 1111 101x + assertEquals(508, tree.read(createBitInputStream(0xff, 0xfc))); // 1111 1111 1111 110x + assertEquals(290, tree.read(createBitInputStream(0xff, 0xfe))); // 1111 1111 1111 1110 + assertEquals(292, tree.read(createBitInputStream(0xff, 0xff))); // 1111 1111 1111 1111 } @Test From 8f53529cce110b5aad5451fae51936eb7db5aba5 Mon 
Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Fri, 29 Aug 2025 19:42:36 +0200 Subject: [PATCH 14/22] Change to package private instead of protected --- .../archivers/lha/LhaArchiveInputStream.java | 16 +++++++-------- ...tLhStaticHuffmanCompressorInputStream.java | 20 +++++++++---------- .../lha/Lh4CompressorInputStream.java | 6 +++--- .../lha/Lh5CompressorInputStream.java | 4 ++-- .../lha/Lh6CompressorInputStream.java | 4 ++-- .../lha/Lh7CompressorInputStream.java | 4 ++-- 6 files changed, 27 insertions(+), 27 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index 678f619d446..3123128775a 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -135,7 +135,7 @@ public LhaArchiveInputStream(final InputStream inputStream, final String charset * @param charsetName the charset used for file names in the archive. May be {@code null} to use US-ASCII as default. * @param fileSeparatorChar the character used to separate file path elements */ - public LhaArchiveInputStream(final InputStream inputStream, final String charsetName, final char fileSeparatorChar) { + LhaArchiveInputStream(final InputStream inputStream, final String charsetName, final char fileSeparatorChar) { super(inputStream, charsetName == null ? StandardCharsets.US_ASCII.name() : charsetName); this.fileSeparatorChar = fileSeparatorChar; } @@ -212,7 +212,7 @@ public LhaArchiveEntry getNextEntry() throws IOException { * @return the next header entry, or null if there are no more entries * @throws IOException */ - protected LhaArchiveEntry readHeader() throws IOException { + LhaArchiveEntry readHeader() throws IOException { // Header level is not known yet. Read the minimum length header. 
final byte[] buffer = new byte[HEADER_GENERIC_MINIMUM_HEADER_LENGTH]; final int len = IOUtils.read(in, buffer); @@ -248,7 +248,7 @@ protected LhaArchiveEntry readHeader() throws IOException { * @return the LhaArchiveEntry read from the buffer * @throws IOException */ - protected LhaArchiveEntry readHeaderLevel0(ByteBuffer buffer) throws IOException { + LhaArchiveEntry readHeaderLevel0(ByteBuffer buffer) throws IOException { final int headerSize = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_0_OFFSET_HEADER_SIZE)); if (headerSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { throw new ArchiveException("Invalid header level 0 length: %d", headerSize); @@ -290,7 +290,7 @@ protected LhaArchiveEntry readHeaderLevel0(ByteBuffer buffer) throws IOException * @return the LhaArchiveEntry read from the buffer * @throws IOException */ - protected LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException { + LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException { final int baseHeaderSize = Byte.toUnsignedInt(buffer.get(HEADER_LEVEL_1_OFFSET_BASE_HEADER_SIZE)); if (baseHeaderSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { throw new ArchiveException("Invalid header level 1 length: %d", baseHeaderSize); @@ -360,7 +360,7 @@ protected LhaArchiveEntry readHeaderLevel1(ByteBuffer buffer) throws IOException * @return the LhaArchiveEntry read from the buffer * @throws IOException */ - protected LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException { + LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException { final int headerSize = Short.toUnsignedInt(buffer.getShort(HEADER_LEVEL_2_OFFSET_HEADER_SIZE)); if (headerSize < HEADER_GENERIC_MINIMUM_HEADER_LENGTH) { throw new ArchiveException("Invalid header level 2 length: %d", headerSize); @@ -414,7 +414,7 @@ protected LhaArchiveEntry readHeaderLevel2(ByteBuffer buffer) throws IOException * @return compression method, e.g. 
-lh5- * @throws ArchiveException if the compression method is invalid */ - protected static String getCompressionMethod(final ByteBuffer buffer) throws ArchiveException { + static String getCompressionMethod(final ByteBuffer buffer) throws ArchiveException { final byte[] compressionMethodBuffer = new byte[5]; byteBufferGet(buffer, HEADER_GENERIC_OFFSET_COMPRESSION_METHOD, compressionMethodBuffer); @@ -445,7 +445,7 @@ protected static String getCompressionMethod(final ByteBuffer buffer) throws Arc * @param pathnameLength the length of the pathname * @return pathname */ - protected String getPathname(final ByteBuffer buffer, final int pathnameLength) { + String getPathname(final ByteBuffer buffer, final int pathnameLength) { final byte[] pathnameBuffer = new byte[pathnameLength]; buffer.get(pathnameBuffer); @@ -525,7 +525,7 @@ private ByteBuffer readExtendedHeader(final int headerSize) throws IOException { * @param entryBuilder the entry builder to set the values in * @throws IOException */ - protected void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final LhaArchiveEntry.Builder entryBuilder) throws IOException { + void parseExtendedHeader(final ByteBuffer extendedHeaderBuffer, final LhaArchiveEntry.Builder entryBuilder) throws IOException { final int extendedHeaderType = Byte.toUnsignedInt(extendedHeaderBuffer.get()); if (extendedHeaderType == EXTENDED_HEADER_TYPE_COMMON) { // Common header diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java index 750733d4f8b..4e36a588860 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/AbstractLhStaticHuffmanCompressorInputStream.java @@ -101,7 +101,7 @@ public void close() throws IOException { * * @return 
the copy threshold */ - protected int getCopyThreshold() { + int getCopyThreshold() { return 3; } @@ -110,14 +110,14 @@ protected int getCopyThreshold() { * * @return the number of bits used for the dictionary size */ - protected abstract int getDictionaryBits(); + abstract int getDictionaryBits(); /** * Gets the size of the dictionary. * * @return the size of the dictionary */ - protected int getDictionarySize() { + int getDictionarySize() { return 1 << getDictionaryBits(); } @@ -126,14 +126,14 @@ protected int getDictionarySize() { * * @return the number of bits used for the distance */ - protected abstract int getDistanceBits(); + abstract int getDistanceBits(); /** * Gets the maximum number of distance codes in the distance tree. * * @return the maximum number of distance codes */ - protected int getMaxNumberOfDistanceCodes() { + int getMaxNumberOfDistanceCodes() { return getDictionaryBits() + 1; } @@ -142,7 +142,7 @@ protected int getMaxNumberOfDistanceCodes() { * * @return the maximum match length */ - protected int getMaxMatchLength() { + int getMaxMatchLength() { return 256; } @@ -152,7 +152,7 @@ protected int getMaxMatchLength() { * * @return the maximum number of commands */ - protected int getMaxNumberOfCommands() { + int getMaxNumberOfCommands() { return NUMBER_OF_LITERAL_CODES + getMaxMatchLength() - getCopyThreshold() + 1; } @@ -224,7 +224,7 @@ private void fillBuffer() throws IOException { * @return the command decoding tree * @throws IOException if an I/O error occurs */ - protected BinaryTree readCommandDecodingTree() throws IOException { + BinaryTree readCommandDecodingTree() throws IOException { // Number of code lengths to read final int numCodeLengths = readBits(COMMAND_DECODING_LENGTH_BITS); @@ -256,7 +256,7 @@ protected BinaryTree readCommandDecodingTree() throws IOException { * @return code length * @throws IOException if an I/O error occurs */ - protected int readCodeLength() throws IOException { + int readCodeLength() throws IOException { 
int len = readBits(CODE_LENGTH_BITS); if (len == 0x07) { int bit = bin.readBit(); @@ -283,7 +283,7 @@ protected int readCodeLength() throws IOException { * @return the command tree * @throws IOException if an I/O error occurs */ - protected BinaryTree readCommandTree(final BinaryTree commandDecodingTree) throws IOException { + BinaryTree readCommandTree(final BinaryTree commandDecodingTree) throws IOException { final int numCodeLengths = readBits(COMMAND_TREE_LENGTH_BITS); if (numCodeLengths > getMaxNumberOfCommands()) { diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java index 44732508201..b5c2810bfd6 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh4CompressorInputStream.java @@ -33,17 +33,17 @@ public Lh4CompressorInputStream(final InputStream in) throws IOException { } @Override - protected int getDictionaryBits() { + int getDictionaryBits() { return 12; } @Override - protected int getDistanceBits() { + int getDistanceBits() { return 4; } @Override - protected int getMaxNumberOfDistanceCodes() { + int getMaxNumberOfDistanceCodes() { return getDictionaryBits() + 2; } } diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java index 0d518f8031f..5a1c29dab69 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh5CompressorInputStream.java @@ -33,12 +33,12 @@ public Lh5CompressorInputStream(final InputStream in) throws IOException { } @Override - protected int getDictionaryBits() { + int getDictionaryBits() { return 13; } @Override - protected int getDistanceBits() { + 
int getDistanceBits() { return 4; } } diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java index d5ac0b7d655..900406eefb0 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh6CompressorInputStream.java @@ -33,12 +33,12 @@ public Lh6CompressorInputStream(final InputStream in) throws IOException { } @Override - protected int getDictionaryBits() { + int getDictionaryBits() { return 15; } @Override - protected int getDistanceBits() { + int getDistanceBits() { return 5; } } diff --git a/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java index 77b1b81a8ee..eb929007df9 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lha/Lh7CompressorInputStream.java @@ -33,12 +33,12 @@ public Lh7CompressorInputStream(final InputStream in) throws IOException { } @Override - protected int getDictionaryBits() { + int getDictionaryBits() { return 16; } @Override - protected int getDistanceBits() { + int getDistanceBits() { return 5; } } From 8ee42104a4f090e7375c70ba42f104c317e25f99 Mon Sep 17 00:00:00 2001 From: Fredrik Kjellberg Date: Sat, 6 Sep 2025 12:25:26 +0200 Subject: [PATCH 15/22] Use builder pattern for LhaArchiveInputStream --- .../archivers/ArchiveStreamFactory.java | 5 +- .../archivers/lha/LhaArchiveInputStream.java | 119 +++++++++--- .../archivers/ArchiveStreamFactoryTest.java | 2 +- .../lha/LhaArchiveInputStreamTest.java | 169 +++++++++++++----- .../lha/Lh4CompressorInputStreamTest.java | 2 +- .../lha/Lh5CompressorInputStreamTest.java | 2 +- .../lha/Lh6CompressorInputStreamTest.java | 2 +- 
.../lha/Lh7CompressorInputStreamTest.java | 2 +- 8 files changed, 227 insertions(+), 76 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index d2a2db99db8..282ff006336 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -46,6 +46,7 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.Sets; +import org.apache.commons.io.Charsets; import org.apache.commons.lang3.StringUtils; /** @@ -463,9 +464,9 @@ public > I createArchiveInp } if (LHA.equalsIgnoreCase(archiverName)) { if (actualEncoding != null) { - return (I) new LhaArchiveInputStream(in, actualEncoding); + return (I) LhaArchiveInputStream.builder().setInputStream(in).setCharset(Charsets.toCharset(actualEncoding)).get(); } - return (I) new LhaArchiveInputStream(in); + return (I) LhaArchiveInputStream.builder().setInputStream(in).get(); } if (ZIP.equalsIgnoreCase(archiverName)) { final ZipArchiveInputStream.Builder zipBuilder = ZipArchiveInputStream.builder().setInputStream(in); diff --git a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java index 3123128775a..453a78b35ee 100644 --- a/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/lha/LhaArchiveInputStream.java @@ -25,6 +25,7 @@ import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Date; @@ -38,6 +39,7 @@ import 
org.apache.commons.compress.compressors.lha.Lh5CompressorInputStream; import org.apache.commons.compress.compressors.lha.Lh6CompressorInputStream; import org.apache.commons.compress.compressors.lha.Lh7CompressorInputStream; +import org.apache.commons.io.Charsets; import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.io.input.ChecksumInputStream; @@ -52,6 +54,94 @@ * @since 1.29.0 */ public class LhaArchiveInputStream extends ArchiveInputStream { + + // @formatter:off + /** + * Builds a new {@link LhaArchiveInputStream}. + * + *