source.dussan.org Git - poi.git/commitdiff
refactor some stream code
authorPJ Fanning <fanningpj@apache.org>
Sat, 19 Feb 2022 12:08:48 +0000 (12:08 +0000)
committerPJ Fanning <fanningpj@apache.org>
Sat, 19 Feb 2022 12:08:48 +0000 (12:08 +0000)
git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1898213 13f79535-47bb-0310-9956-ffa450edef68

14 files changed:
poi-integration/src/test/java/org/apache/poi/stress/XSSFBFileHandler.java
poi-ooxml/src/main/java/org/apache/poi/xssf/usermodel/XSSFBuiltinTableStyle.java
poi-ooxml/src/test/java/org/apache/poi/ooxml/TestPOIXMLProperties.java
poi-ooxml/src/test/java/org/apache/poi/poifs/crypt/dsig/DummyKeystore.java
poi-scratchpad/src/main/java/org/apache/poi/extractor/ole2/OLE2ScratchpadExtractorFactory.java
poi-scratchpad/src/main/java/org/apache/poi/hdgf/streams/CompressedStreamStore.java
poi-scratchpad/src/main/java/org/apache/poi/hemf/draw/HemfImageRenderer.java
poi-scratchpad/src/main/java/org/apache/poi/hemf/record/emfplus/HemfPlusBrush.java
poi-scratchpad/src/main/java/org/apache/poi/hemf/record/emfplus/HemfPlusImage.java
poi-scratchpad/src/main/java/org/apache/poi/hmef/attribute/MAPIAttribute.java
poi-scratchpad/src/main/java/org/apache/poi/hmef/attribute/MAPIRtfAttribute.java
poi-scratchpad/src/main/java/org/apache/poi/hwpf/usermodel/Picture.java
poi/src/test/java/org/apache/poi/poifs/storage/RawDataUtil.java
poi/src/test/java/org/apache/poi/ss/util/NumberRenderingSpreadsheetGenerator.java

index b8a247524f42348182c9b05a7e7745182be56eb4..f60752d6ddbdfd9e6fdbe53973c55e4a2bad6bfa 100644 (file)
 ==================================================================== */
 package org.apache.poi.stress;
 
-import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.poi.openxml4j.opc.OPCPackage;
 import org.apache.poi.openxml4j.opc.PackageAccess;
 import org.apache.poi.util.IOUtils;
@@ -41,11 +41,11 @@ public class XSSFBFileHandler extends AbstractFileHandler {
     public void handleFile(InputStream stream, String path) throws Exception {
         byte[] bytes = IOUtils.toByteArray(stream);
 
-        try (OPCPackage opcPackage = OPCPackage.open(new ByteArrayInputStream(bytes))) {
+        try (OPCPackage opcPackage = OPCPackage.open(new UnsynchronizedByteArrayInputStream(bytes))) {
             testOne(opcPackage);
         }
 
-        testNotHandledByWorkbookException(OPCPackage.open(new ByteArrayInputStream(bytes)));
+        testNotHandledByWorkbookException(OPCPackage.open(new UnsynchronizedByteArrayInputStream(bytes)));
     }
 
     private void testNotHandledByWorkbookException(OPCPackage pkg) throws IOException {
index 3efb338dc483ff34040de67cdc7d0d03a2ca5ad4..a9d183fe93c792ffded036fdc3a602a5c96da46f 100644 (file)
@@ -17,7 +17,6 @@
 
 package org.apache.poi.xssf.usermodel;
 
-import java.io.ByteArrayInputStream;
 import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.EnumMap;
@@ -29,6 +28,7 @@ import javax.xml.transform.TransformerException;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.commons.io.output.StringBuilderWriter;
 import org.apache.poi.ooxml.util.DocumentHelper;
 import org.apache.poi.ss.usermodel.DifferentialStyleProvider;
@@ -407,7 +407,10 @@ public enum XSSFBuiltinTableStyle {
                 // hack because I can't figure out how to get XMLBeans to parse a sub-element in a standalone manner
                 // - build a fake styles.xml file with just this built-in
                 StylesTable styles = new StylesTable();
-                styles.readFrom(new ByteArrayInputStream(styleXML(dxfsNode, tableStyleNode).getBytes(StandardCharsets.UTF_8)));
+                try (UnsynchronizedByteArrayInputStream bis = new UnsynchronizedByteArrayInputStream(
+                        styleXML(dxfsNode, tableStyleNode).getBytes(StandardCharsets.UTF_8))) {
+                    styles.readFrom(bis);
+                }
                 styleMap.put(builtIn, new XSSFBuiltinTypeStyleStyle(builtIn, styles.getExplicitTableStyle(styleName)));
             }
         } catch (Exception e) {
index 926d840affb82ab98594c7f41f169d04232348e6..fae10b0c0d093e854c2f17c653f0322113fe8009 100644 (file)
@@ -26,7 +26,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
-import java.time.ZoneId;
 import java.time.ZoneOffset;
 import java.util.Calendar;
 import java.util.Date;
index f3293b845db1851492df75a1faa005e48439be6a..3763558ae31420b1f594b22c6e92d7486ae1f43b 100644 (file)
@@ -24,7 +24,6 @@
 package org.apache.poi.poifs.crypt.dsig;
 
 import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
@@ -53,6 +52,7 @@ import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.poi.poifs.crypt.CryptoFunctions;
 import org.apache.poi.poifs.storage.RawDataUtil;
 import org.apache.poi.util.LocaleUtil;
@@ -139,7 +139,7 @@ public class DummyKeystore {
     public DummyKeystore(String pfxInput, String storePass) throws GeneralSecurityException, IOException {
         CryptoFunctions.registerBouncyCastle();
         keystore = KeyStore.getInstance("PKCS12");
-        try (InputStream fis = new ByteArrayInputStream(RawDataUtil.decompress(pfxInput))) {
+        try (InputStream fis = new UnsynchronizedByteArrayInputStream(RawDataUtil.decompress(pfxInput))) {
             keystore.load(fis, storePass.toCharArray());
         }
     }
index 3c4ab886f41dad8524460a2e5b125073a923aeba..2485589e3133c4d52488fd6b8f4941ba78710d15 100644 (file)
@@ -16,7 +16,6 @@
 ==================================================================== */
 package org.apache.poi.extractor.ole2;
 
-import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -24,6 +23,7 @@ import java.io.InputStream;
 import java.util.List;
 import java.util.stream.StreamSupport;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.poi.extractor.ExtractorFactory;
@@ -185,7 +185,7 @@ public class OLE2ScratchpadExtractorFactory implements ExtractorProvider {
             for (AttachmentChunks attachment : msg.getAttachmentFiles()) {
                 if (attachment.getAttachData() != null) {
                     byte[] data = attachment.getAttachData().getValue();
-                    nonPOIFS.add( new ByteArrayInputStream(data) );
+                    nonPOIFS.add( new UnsynchronizedByteArrayInputStream(data) );
                 } else if (attachment.getAttachmentDirectory() != null) {
                     dirs.add(attachment.getAttachmentDirectory().getDirectory());
                 }
index fc1057f8394ca0c600cda55bd49680ae2380ad73..0009a3d511f2b1ca17c494521e7932786ab01ecf 100644 (file)
@@ -17,9 +17,9 @@
 
 package org.apache.poi.hdgf.streams;
 
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.poi.hdgf.HDGFLZW;
 import org.apache.poi.util.IOUtils;
 
@@ -92,25 +92,26 @@ public final class CompressedStreamStore extends StreamStore {
      * Decompresses the given data, returning it as header + contents
      */
     public static byte[][] decompress(byte[] data, int offset, int length) throws IOException {
-        ByteArrayInputStream bais = new ByteArrayInputStream(data, offset, length);
+        try (UnsynchronizedByteArrayInputStream bais = new UnsynchronizedByteArrayInputStream(data, offset, length)) {
+            // Decompress
+            HDGFLZW lzw = new HDGFLZW();
+            byte[] decompressed = lzw.decompress(bais);
 
-        // Decompress
-        HDGFLZW lzw = new HDGFLZW();
-        byte[] decompressed = lzw.decompress(bais);
+            if (decompressed.length < 4) {
+                throw new IllegalArgumentException("Could not read enough data to decompress: " + decompressed.length);
+            }
 
-        if (decompressed.length < 4) {
-            throw new IllegalArgumentException("Could not read enough data to decompress: " + decompressed.length);
-        }
+            // Split into header and contents
+            byte[][] ret = new byte[2][];
+            ret[0] = new byte[4];
+            ret[1] = new byte[decompressed.length - 4];
 
-        // Split into header and contents
-        byte[][] ret = new byte[2][];
-        ret[0] = new byte[4];
-        ret[1] = new byte[decompressed.length - 4];
+            System.arraycopy(decompressed, 0, ret[0], 0, 4);
+            System.arraycopy(decompressed, 4, ret[1], 0, ret[1].length);
 
-        System.arraycopy(decompressed, 0, ret[0], 0, 4);
-        System.arraycopy(decompressed, 4, ret[1], 0, ret[1].length);
+            // All done
+            return ret;
+        }
 
-        // All done
-        return ret;
     }
 }
index 95d3b7e70a011b6b1b322530ec521d57777035fa..221c9d5a5ab9d0504b02859bddaffa492e31bd66 100644 (file)
@@ -25,11 +25,11 @@ import java.awt.RenderingHints;
 import java.awt.geom.Dimension2D;
 import java.awt.geom.Rectangle2D;
 import java.awt.image.BufferedImage;
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.Charset;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.poi.common.usermodel.GenericRecord;
 import org.apache.poi.hemf.usermodel.HemfPicture;
 import org.apache.poi.hwmf.draw.HwmfGraphicsState;
@@ -65,7 +65,7 @@ public class HemfImageRenderer implements ImageRenderer, EmbeddedExtractor {
         if (!PictureData.PictureType.EMF.contentType.equals(contentType)) {
             throw new IOException("Invalid picture type");
         }
-        image = new HemfPicture(new ByteArrayInputStream(data));
+        image = new HemfPicture(new UnsynchronizedByteArrayInputStream(data));
     }
 
     @Override
index e713254e9e53c48387734824cee2910a71c38969..ec0ac8e465c46b0fa063ca0d7c1c4df7807e1fb6 100644 (file)
@@ -26,7 +26,6 @@ import java.awt.Color;
 import java.awt.geom.AffineTransform;
 import java.awt.geom.Point2D;
 import java.awt.geom.Rectangle2D;
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.util.AbstractMap;
 import java.util.Arrays;
@@ -40,6 +39,7 @@ import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
 import org.apache.poi.common.usermodel.GenericRecord;
 import org.apache.poi.hemf.draw.HemfDrawProperties;
@@ -391,7 +391,7 @@ public class HemfPlusBrush {
             EmfPlusBrushData brushData = brushType.constructor.get();
             byte[] buf = getRawData(continuedObjectData);
             try {
-                brushData.init(new LittleEndianInputStream(new ByteArrayInputStream(buf)), buf.length);
+                brushData.init(new LittleEndianInputStream(new UnsynchronizedByteArrayInputStream(buf)), buf.length);
             } catch (IOException e) {
                 throw new RuntimeException(e);
             }
index 4b0a402e13c619e76b37de52bc4b70be909a1a9a..e764e78a1e9f22e3d350cb69f602dbf47198be59 100644 (file)
@@ -22,7 +22,6 @@ import static org.apache.poi.hemf.record.emfplus.HemfPlusDraw.readARGB;
 import java.awt.Color;
 import java.awt.geom.Rectangle2D;
 import java.awt.image.BufferedImage;
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.LinkedHashMap;
@@ -32,6 +31,7 @@ import java.util.function.Supplier;
 
 import javax.imageio.ImageIO;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
 import org.apache.poi.hemf.draw.HemfDrawProperties;
 import org.apache.poi.hemf.draw.HemfGraphics;
@@ -419,21 +419,24 @@ public class HemfPlusImage {
                         if (getBitmapType() == EmfPlusBitmapDataType.PIXEL) {
                             return new Rectangle2D.Double(0, 0, bitmapWidth, bitmapHeight);
                         } else {
-                            BufferedImage bi = ImageIO.read(new ByteArrayInputStream(getRawData(continuedObjectData)));
-                            return new Rectangle2D.Double(bi.getMinX(), bi.getMinY(), bi.getWidth(), bi.getHeight());
+                            try(UnsynchronizedByteArrayInputStream is = new UnsynchronizedByteArrayInputStream(getRawData(continuedObjectData))) {
+                                BufferedImage bi = ImageIO.read(is);
+                                return new Rectangle2D.Double(bi.getMinX(), bi.getMinY(), bi.getWidth(), bi.getHeight());
+                            }
                         }
                     case METAFILE:
-                        ByteArrayInputStream bis = new ByteArrayInputStream(getRawData(continuedObjectData));
-                        switch (getMetafileType()) {
-                            case Wmf:
-                            case WmfPlaceable:
-                                HwmfPicture wmf = new HwmfPicture(bis);
-                                return wmf.getBounds();
-                            case Emf:
-                            case EmfPlusDual:
-                            case EmfPlusOnly:
-                                HemfPicture emf = new HemfPicture(bis);
-                                return emf.getBounds();
+                        try(UnsynchronizedByteArrayInputStream bis = new UnsynchronizedByteArrayInputStream(getRawData(continuedObjectData))) {
+                            switch (getMetafileType()) {
+                                case Wmf:
+                                case WmfPlaceable:
+                                    HwmfPicture wmf = new HwmfPicture(bis);
+                                    return wmf.getBounds();
+                                case Emf:
+                                case EmfPlusDual:
+                                case EmfPlusOnly:
+                                    HemfPicture emf = new HemfPicture(bis);
+                                    return emf.getBounds();
+                            }
                         }
                         break;
                     default:
index 0338ed62dc83feded8628d0a5c348c2dd55b5071..a5f797220eac6e6735989fa9fe5eb83ace15b0a5 100644 (file)
 
 package org.apache.poi.hmef.attribute;
 
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.poi.hmef.Attachment;
 import org.apache.poi.hmef.HMEFMessage;
 import org.apache.poi.hsmf.datatypes.MAPIProperty;
@@ -115,107 +115,108 @@ public class MAPIAttribute {
                "instead received a " + parent.getProperty() + " one"
          );
       }
-      ByteArrayInputStream inp = new ByteArrayInputStream(parent.getData());
-
-      // First up, get the number of attributes
-      int count = LittleEndian.readInt(inp);
-      List<MAPIAttribute> attrs = new ArrayList<>();
-
-      // Now, read each one in in turn
-      for(int i=0; i<count; i++) {
-         int typeAndMV = LittleEndian.readUShort(inp);
-         int id = LittleEndian.readUShort(inp);
-
-         // Is it either Multi-Valued or Variable-Length?
-         boolean isMV = false;
-         boolean isVL = false;
-         int typeId = typeAndMV;
-         if( (typeAndMV & Types.MULTIVALUED_FLAG) != 0 ) {
-            isMV = true;
-            typeId -= Types.MULTIVALUED_FLAG;
-         }
-         if(typeId == Types.ASCII_STRING.getId() || typeId == Types.UNICODE_STRING.getId() ||
-               typeId == Types.BINARY.getId() || typeId == Types.DIRECTORY.getId()) {
-            isVL = true;
-         }
-
-         // Turn the type ID into a strongly typed thing
-         MAPIType type = Types.getById(typeId);
-         if (type == null) {
-            type = Types.createCustom(typeId);
-         }
-
-         // If it's a named property, rather than a standard
-         //  MAPI property, grab the details of it
-         MAPIProperty prop = MAPIProperty.get(id);
-         if(id >= 0x8000 && id <= 0xFFFF) {
-            byte[] guid = new byte[16];
-            if (IOUtils.readFully(inp, guid) < 0) {
-               throw new IOException("Not enough data to read guid");
+      try(UnsynchronizedByteArrayInputStream inp = new UnsynchronizedByteArrayInputStream(parent.getData())) {
+         // First up, get the number of attributes
+         int count = LittleEndian.readInt(inp);
+         List<MAPIAttribute> attrs = new ArrayList<>();
+
+         // Now, read each one in in turn
+         for(int i=0; i<count; i++) {
+            int typeAndMV = LittleEndian.readUShort(inp);
+            int id = LittleEndian.readUShort(inp);
+
+            // Is it either Multi-Valued or Variable-Length?
+            boolean isMV = false;
+            boolean isVL = false;
+            int typeId = typeAndMV;
+            if( (typeAndMV & Types.MULTIVALUED_FLAG) != 0 ) {
+               isMV = true;
+               typeId -= Types.MULTIVALUED_FLAG;
             }
-            int mptype = LittleEndian.readInt(inp);
-
-            // Get the name of it
-            String name;
-            if(mptype == 0) {
-               // It's based on a normal one
-               int mpid = LittleEndian.readInt(inp);
-               MAPIProperty base = MAPIProperty.get(mpid);
-               name = base.name;
-            } else {
-               // Custom name was stored
-               int mplen = LittleEndian.readInt(inp);
-               byte[] mpdata = IOUtils.safelyAllocate(mplen, MAX_RECORD_LENGTH);
-               if (IOUtils.readFully(inp, mpdata) < 0) {
-                  throw new IOException("Not enough data to read " + mplen + " bytes for attribute name");
-               }
-               name = StringUtil.getFromUnicodeLE(mpdata, 0, (mplen/2)-1);
-               skipToBoundary(mplen, inp);
+            if(typeId == Types.ASCII_STRING.getId() || typeId == Types.UNICODE_STRING.getId() ||
+                    typeId == Types.BINARY.getId() || typeId == Types.DIRECTORY.getId()) {
+               isVL = true;
             }
 
-            // Now create
-            prop = MAPIProperty.createCustom(id, type, name);
-         }
-         if(prop == MAPIProperty.UNKNOWN) {
-            prop = MAPIProperty.createCustom(id, type, "(unknown " + Integer.toHexString(id) + ")");
-         }
+            // Turn the type ID into a strongly typed thing
+            MAPIType type = Types.getById(typeId);
+            if (type == null) {
+               type = Types.createCustom(typeId);
+            }
 
-         // Now read in the value(s)
-         int values = 1;
-         if(isMV || isVL) {
-            values = LittleEndian.readInt(inp);
-         }
+            // If it's a named property, rather than a standard
+            //  MAPI property, grab the details of it
+            MAPIProperty prop = MAPIProperty.get(id);
+            if(id >= 0x8000 && id <= 0xFFFF) {
+               byte[] guid = new byte[16];
+               if (IOUtils.readFully(inp, guid) < 0) {
+                  throw new IOException("Not enough data to read guid");
+               }
+               int mptype = LittleEndian.readInt(inp);
+
+               // Get the name of it
+               String name;
+               if(mptype == 0) {
+                  // It's based on a normal one
+                  int mpid = LittleEndian.readInt(inp);
+                  MAPIProperty base = MAPIProperty.get(mpid);
+                  name = base.name;
+               } else {
+                  // Custom name was stored
+                  int mplen = LittleEndian.readInt(inp);
+                  byte[] mpdata = IOUtils.safelyAllocate(mplen, MAX_RECORD_LENGTH);
+                  if (IOUtils.readFully(inp, mpdata) < 0) {
+                     throw new IOException("Not enough data to read " + mplen + " bytes for attribute name");
+                  }
+                  name = StringUtil.getFromUnicodeLE(mpdata, 0, (mplen/2)-1);
+                  skipToBoundary(mplen, inp);
+               }
 
-         if (type == Types.NULL && values > 1) {
-            throw new IOException("Placeholder/NULL arrays aren't supported.");
-         }
+               // Now create
+               prop = MAPIProperty.createCustom(id, type, name);
+            }
+            if(prop == MAPIProperty.UNKNOWN) {
+               prop = MAPIProperty.createCustom(id, type, "(unknown " + Integer.toHexString(id) + ")");
+            }
 
-         for(int j=0; j<values; j++) {
-            int len = getLength(type, inp);
-            byte[] data = IOUtils.safelyAllocate(len, MAX_RECORD_LENGTH);
-            if (IOUtils.readFully(inp, data) < 0) {
-               throw new IOException("Not enough data to read " + len + " bytes of attribute value");
+            // Now read in the value(s)
+            int values = 1;
+            if(isMV || isVL) {
+               values = LittleEndian.readInt(inp);
             }
-            skipToBoundary(len, inp);
-
-            // Create
-            MAPIAttribute attr;
-            if(type == Types.UNICODE_STRING || type == Types.ASCII_STRING) {
-               attr = new MAPIStringAttribute(prop, typeId, data);
-            } else if(type == Types.APP_TIME || type == Types.TIME) {
-               attr = new MAPIDateAttribute(prop, typeId, data);
-            } else if(id == MAPIProperty.RTF_COMPRESSED.id) {
-               attr = new MAPIRtfAttribute(prop, typeId, data);
-            } else {
-               attr = new MAPIAttribute(prop, typeId, data);
+
+            if (type == Types.NULL && values > 1) {
+               throw new IOException("Placeholder/NULL arrays aren't supported.");
+            }
+
+            for(int j=0; j<values; j++) {
+               int len = getLength(type, inp);
+               byte[] data = IOUtils.safelyAllocate(len, MAX_RECORD_LENGTH);
+               if (IOUtils.readFully(inp, data) < 0) {
+                  throw new IOException("Not enough data to read " + len + " bytes of attribute value");
+               }
+               skipToBoundary(len, inp);
+
+               // Create
+               MAPIAttribute attr;
+               if(type == Types.UNICODE_STRING || type == Types.ASCII_STRING) {
+                  attr = new MAPIStringAttribute(prop, typeId, data);
+               } else if(type == Types.APP_TIME || type == Types.TIME) {
+                  attr = new MAPIDateAttribute(prop, typeId, data);
+               } else if(id == MAPIProperty.RTF_COMPRESSED.id) {
+                  attr = new MAPIRtfAttribute(prop, typeId, data);
+               } else {
+                  attr = new MAPIAttribute(prop, typeId, data);
+               }
+               attrs.add(attr);
             }
-            attrs.add(attr);
          }
-      }
 
-      // All done
-      return attrs;
+         // All done
+         return attrs;
+      }
    }
+
    private static int getLength(MAPIType type, InputStream inp) throws IOException {
       if (type.isFixedLength()) {
          return type.getLength();
index 295a022db89977789631cb1f8f908e9f4f91ed42..f8b2d1952229f1c1f74da1f85add93c31426c563 100644 (file)
 
 package org.apache.poi.hmef.attribute;
 
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
+import java.io.InputStream;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.poi.hmef.Attachment;
 import org.apache.poi.hmef.CompressedRTF;
 import org.apache.poi.hmef.HMEFMessage;
@@ -44,7 +45,11 @@ public final class MAPIRtfAttribute extends MAPIAttribute {
 
       // Decompress it, removing any trailing padding as needed
       CompressedRTF rtf = new CompressedRTF();
-      byte[] tmp = rtf.decompress(new ByteArrayInputStream(data));
+      byte[] tmp;
+      try (InputStream is = new UnsynchronizedByteArrayInputStream(data)) {
+         tmp = rtf.decompress(is);
+      }
+
       if(tmp.length > rtf.getDeCompressedSize()) {
          this.decompressed = IOUtils.safelyClone(tmp, 0, rtf.getDeCompressedSize(), MAX_RECORD_LENGTH);
       } else {
index 26bfc0546ae35cffd2e6c3eb0bbb2e4806094dbd..05b22ccd00450f11d67973a6c08331db6d3714dc 100644 (file)
 
 package org.apache.poi.hwpf.usermodel;
 
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Collections;
 import java.util.List;
 import java.util.zip.InflaterInputStream;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -140,8 +140,8 @@ public final class Picture {
          */
         if ( matchSignature( rawContent, COMPRESSED1, 32 )
                 || matchSignature( rawContent, COMPRESSED2, 32 ) ) {
-            try (ByteArrayInputStream bis = new ByteArrayInputStream( rawContent, 33, rawContent.length - 33 );
-                InflaterInputStream in = new InflaterInputStream(bis);
+            try (UnsynchronizedByteArrayInputStream bis = new UnsynchronizedByteArrayInputStream( rawContent, 33, rawContent.length - 33 );
+                 InflaterInputStream in = new InflaterInputStream(bis);
                  UnsynchronizedByteArrayOutputStream out = new UnsynchronizedByteArrayOutputStream()) {
 
                 IOUtils.copy(in, out);
index 27d91331186becbd24d3e7733d68d5da1b330922..00c892d1d71394bcbe66fd1518f42e230253d1e8 100644 (file)
 ==================================================================== */
 package org.apache.poi.poifs.storage;
 
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.util.Base64;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
 import org.apache.poi.util.HexRead;
 import org.apache.poi.util.IOUtils;
@@ -53,7 +53,7 @@ public final class RawDataUtil {
      */
     public static byte[] decompress(String data) throws IOException {
         byte[] base64Bytes = Base64.getDecoder().decode(data);
-        return IOUtils.toByteArray(new GZIPInputStream(new ByteArrayInputStream(base64Bytes)));
+        return IOUtils.toByteArray(new GZIPInputStream(new UnsynchronizedByteArrayInputStream(base64Bytes)));
     }
 
     /**
index 8ad04787043ba24e979751c2df7369ffe7a665e0..62b83ffaf98bc429c634dd7744dd563bee9c50ff 100644 (file)
@@ -17,7 +17,6 @@
 
 package org.apache.poi.ss.util;
 
-import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.File;
 import java.io.FileOutputStream;
@@ -27,6 +26,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 
+import org.apache.commons.io.input.UnsynchronizedByteArrayInputStream;
 import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream;
 import org.apache.poi.hssf.usermodel.HSSFCell;
 import org.apache.poi.hssf.usermodel.HSSFCellStyle;
@@ -212,14 +212,12 @@ public class NumberRenderingSpreadsheetGenerator {
     }
 
     private static String interpretLong(byte[] fileContent, int offset) {
-        InputStream is = new ByteArrayInputStream(fileContent, offset, 8);
-        long l;
-        try {
-            l = new DataInputStream(is).readLong();
+        try (InputStream is = new UnsynchronizedByteArrayInputStream(fileContent, offset, 8)) {
+            long l = new DataInputStream(is).readLong();
+            return "0x" + Long.toHexString(l).toUpperCase(Locale.ROOT);
         } catch (IOException e) {
-            throw new RuntimeException(e);
+            throw new IllegalStateException("Problem in interpretLong", e);
         }
-        return "0x" + Long.toHexString(l).toUpperCase(Locale.ROOT);
     }
 
     private static boolean isNaNBytes(byte[] fileContent, int offset) {