/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.poifs.filesystem;

import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.apache.poi.EmptyFileException;
import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.dev.POIFSViewable;
import org.apache.poi.poifs.nio.ByteArrayBackedDataSource;
import org.apache.poi.poifs.nio.DataSource;
import org.apache.poi.poifs.nio.FileBackedDataSource;
import org.apache.poi.poifs.property.DirectoryProperty;
import org.apache.poi.poifs.property.DocumentProperty;
import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.poifs.storage.BATBlock;
import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Internal;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

/**
 * This is the main class of the POIFS system; it manages the entire
 * life cycle of the filesystem.
 * <p>
 * This is the new NIO version, which uses less memory.
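 * <p>
 * A minimal read-only usage sketch (the file name and the entry name
 * "SomeStream" are illustrative, not part of the API):
 * <pre>
 * try (POIFSFileSystem fs = new POIFSFileSystem(new File("input.xls"), true)) {
 *     DirectoryNode root = fs.getRoot();
 *     try (DocumentInputStream dis = root.createDocumentInputStream("SomeStream")) {
 *         // read the entry's contents from dis
 *     }
 * }
 * </pre>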
 */
public class POIFSFileSystem extends BlockStore
        implements POIFSViewable, Closeable {
    // arbitrarily selected; may need to increase
    private static final int MAX_RECORD_LENGTH = 100_000;

    private static final POILogger LOG = POILogFactory.getLogger(POIFSFileSystem.class);

    /**
     * Maximum size (in blocks) of the allocation table as supported by
     * POI.
*
* This constant has been chosen to help POI identify corrupted data in the
* header block (rather than crash immediately with {@link OutOfMemoryError}
* ). It's not clear if the compound document format actually specifies any
* upper limits. For files with 512 byte blocks, having an allocation table
     * of 65,535 blocks would correspond to a total file size of 4GB. Needless
* to say, POI probably cannot handle files anywhere near that size.
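     * <p>
     * (Illustrative arithmetic, assuming 512 byte blocks: each 512 byte FAT
     * block holds 128 four-byte entries and each entry maps one 512 byte
     * block, so 65,535 FAT blocks address about 65,535 * 128 * 512 bytes,
     * i.e. roughly 4GB.)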
*/
private static final int MAX_BLOCK_COUNT = 65535;
private POIFSMiniStore _mini_store;
private PropertyTable _property_table;
    private List<BATBlock> _xbat_blocks;
    private List<BATBlock> _bat_blocks;
    private HeaderBlock _header;
    private DirectoryNode _root;
    private DataSource _data;

    /** What big block size the file uses. Most files use 512 bytes, but a few use 4096 */
    private POIFSBigBlockSize bigBlockSize =
            POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;

    /**
     * Creates a POIFSFileSystem from a File. This uses less memory than
     * creating from an InputStream. The File will be opened read-only.
     *
     * Note that with this constructor, you will need to call {@link #close()}
     * when you're done to have the underlying file closed, as the file is
     * kept open during normal operation to read the data out.
     */
    public POIFSFileSystem(File file) throws IOException {
        this(file, true);
    }

    /**
     * Creates a POIFSFileSystem from a File. This uses less memory than
     * creating from an InputStream.
     *
     * Note that with this constructor, you will need to call {@link #close()}
     * when you're done to have the underlying file closed, as the file is
     * kept open during normal operation to read the data out.
     */
    public POIFSFileSystem(File file, boolean readOnly) throws IOException {
        this(null, file, readOnly, true);
    }

    /**
     * Creates a POIFSFileSystem from an open FileChannel. This uses
     * less memory than creating from an InputStream. The stream will
     * be used in read-only mode.
     *
     * Note that with this constructor, you will need to call {@link #close()}
     * when you're done to have the underlying Channel closed, as the channel is
     * kept open during normal operation to read the data out.
     */
    public POIFSFileSystem(FileChannel channel) throws IOException {
        this(channel, true);
    }

    /**
     * Creates a POIFSFileSystem from an open FileChannel. This uses
     * less memory than creating from an InputStream.
     *
     * Note that with this constructor, you will need to call {@link #close()}
     * when you're done to have the underlying Channel closed, as the channel is
     * kept open during normal operation to read the data out.
     */
    public POIFSFileSystem(FileChannel channel, boolean readOnly) throws IOException {
        this(channel, null, readOnly, false);
    }
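    // A writable-usage sketch (the file name is illustrative): when opened from
    // a File or FileChannel the data stays on disk rather than in memory, so
    // the filesystem must be closed once you are done with it.
    //
    //   try (POIFSFileSystem fs = new POIFSFileSystem(new File("input.xls"), false)) {
    //       // ... read or modify entries via fs.getRoot() ...
    //       fs.writeFilesystem();   // persist in-place changes back to the file
    //   }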
    /**
     * Create a POIFSFileSystem from an InputStream. Normally the stream is read until
     * EOF. The stream is always closed.
     *
* Some streams are usable after reaching EOF (typically those that return true
* for markSupported()). In the unlikely case that the caller has such a stream
     * and needs to use it after this constructor completes, a workaround is to wrap the
* stream in order to trap the close() call. A convenience method (
* createNonClosingInputStream()) has been provided for this purpose:
     * <pre>
     * InputStream wrappedStream = POIFSFileSystem.createNonClosingInputStream(is);
     * HSSFWorkbook wb = new HSSFWorkbook(wrappedStream);
     * is.reset();
     * doSomethingElse(is);
     * </pre>
* Note also the special case of ByteArrayInputStream for which the close()
* method does nothing.
     * <pre>
     * ByteArrayInputStream bais = ...
     * HSSFWorkbook wb = new HSSFWorkbook(bais); // calls bais.close() !
     * bais.reset(); // no problem
     * doSomethingElse(bais);
     * </pre>
*
* @param stream the InputStream from which to read the data
*
* @exception IOException on errors reading, or on invalid data
*/
public POIFSFileSystem(InputStream stream)
throws IOException
{
this(false);
boolean success = false;
try (ReadableByteChannel channel = Channels.newChannel(stream)) {
// Turn our InputStream into something NIO based
// Get the header
ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
IOUtils.readFully(channel, headerBuffer);
// Have the header processed
_header = new HeaderBlock(headerBuffer);
// Sanity check the block count
sanityCheckBlockCount(_header.getBATCount());
// We need to buffer the whole file into memory when
// working with an InputStream.
// The max possible size is when each BAT block entry is used
long maxSize = BATBlock.calculateMaximumSize(_header);
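            // (For 512 byte blocks this works out to roughly
            //  (1 + BATCount * 128) * 512 bytes: one header block plus 128
            //  addressable blocks per BAT block; the exact figure comes from
            //  BATBlock.calculateMaximumSize.)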
if (maxSize > Integer.MAX_VALUE) {
                throw new IllegalArgumentException("Unable to read a >2gb file via an InputStream");
}
ByteBuffer data = ByteBuffer.allocate((int) maxSize);
// Copy in the header
headerBuffer.position(0);
data.put(headerBuffer);
data.position(headerBuffer.capacity());
// Now read the rest of the stream
IOUtils.readFully(channel, data);
success = true;
// Turn it into a DataSource
_data = new ByteArrayBackedDataSource(data.array(), data.position());
} finally {
// As per the constructor contract, always close the stream
closeInputStream(stream, success);
}
// Now process the various entries
readCoreContents();
}
/**
* @param stream the stream to be closed
     * @param success {@code false} if an exception is currently being thrown in the calling method
*/
private void closeInputStream(InputStream stream, boolean success) {
try {
stream.close();
} catch (IOException e) {
if(success) {
throw new RuntimeException(e);
}
            // else: the try block did not complete normally;
            // just log the error and let the original exception be thrown
LOG.log(POILogger.ERROR, "can't close input stream", e);
}
}
/**
* Read and process the PropertiesTable and the
* FAT / XFAT blocks, so that we're ready to
* work with the file
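     * <p>
     * (Layout note: the header itself lists the first BAT blocks, up to 109
     * entries for 512 byte blocks; any further BAT blocks are listed in XBAT
     * blocks, each of which uses its last entry to point to the next XBAT
     * block in the chain.)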
*/
private void readCoreContents() throws IOException {
// Grab the block size
bigBlockSize = _header.getBigBlockSize();
        // Each block should only ever be used by one of the
        // FAT, XFAT or Property Table. Ensure that is the case
ChainLoopDetector loopDetector = getChainLoopDetector();
// Read the FAT blocks
for(int fatAt : _header.getBATArray()) {
readBAT(fatAt, loopDetector);
}
// Work out how many FAT blocks remain in the XFATs
int remainingFATs = _header.getBATCount() - _header.getBATArray().length;
// Now read the XFAT blocks, and the FATs within them
BATBlock xfat;
int nextAt = _header.getXBATIndex();
for(int i=0; i<_header.getXBATCount(); i++) {
loopDetector.claim(nextAt);
ByteBuffer fatData = getBlockAt(nextAt);
xfat = BATBlock.createBATBlock(bigBlockSize, fatData);
xfat.setOurBlockIndex(nextAt);
nextAt = xfat.getValueAt(bigBlockSize.getXBATEntriesPerBlock());
_xbat_blocks.add(xfat);
// Process all the (used) FATs from this XFAT
int xbatFATs = Math.min(remainingFATs, bigBlockSize.getXBATEntriesPerBlock());
for(int j=0; j