import org.apache.poi.hpsf.PropertySetFactory;
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
* <p>This is a sample application showing how to easily modify properties in
File summaryFile = new File(args[0]);
/* Open the POI filesystem. */
- try (NPOIFSFileSystem poifs = new NPOIFSFileSystem(summaryFile, false)) {
+ try (POIFSFileSystem poifs = new POIFSFileSystem(summaryFile, false)) {
/* Read the summary information. */
DirectoryEntry dir = poifs.getRoot();
/* Insert some custom properties into the container. */
customProperties.put("Key 1", "Value 1");
customProperties.put("Schl\u00fcssel 2", "Wert 2");
- customProperties.put("Sample Number", new Integer(12345));
+ customProperties.put("Sample Number", 12345);
customProperties.put("Sample Boolean", Boolean.TRUE);
customProperties.put("Sample Date", new Date());
import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIEncryptor;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Internal;
this.directory = dir;
}
- /**
- * Constructs from an old-style OPOIFS
- *
- * @param fs the filesystem the document is read from
- */
- protected POIDocument(NPOIFSFileSystem fs) {
- this(fs.getRoot());
- }
/**
* Constructs from the default POIFS
*
protected PropertySet getPropertySet(String setName, EncryptionInfo encryptionInfo) throws IOException {
DirectoryNode dirNode = directory;
- NPOIFSFileSystem encPoifs = null;
+ POIFSFileSystem encPoifs = null;
String step = "getting";
try {
if (encryptionInfo != null && encryptionInfo.isDocPropsEncrypted()) {
- * into the currently open NPOIFSFileSystem
+ * into the currently open POIFSFileSystem
*
* @throws IOException if an error when writing to the open
- * {@link NPOIFSFileSystem} occurs
+ * {@link POIFSFileSystem} occurs
*/
protected void writeProperties() throws IOException {
validateInPlaceWritePossible();
* @param outFS the POIFSFileSystem to write the properties into
*
* @throws IOException if an error when writing to the
- * {@link NPOIFSFileSystem} occurs
+ * {@link POIFSFileSystem} occurs
*/
- protected void writeProperties(NPOIFSFileSystem outFS) throws IOException {
+ protected void writeProperties(POIFSFileSystem outFS) throws IOException {
writeProperties(outFS, null);
}
/**
* @param writtenEntries a list of POIFS entries to add the property names too
*
* @throws IOException if an error when writing to the
- * {@link NPOIFSFileSystem} occurs
+ * {@link POIFSFileSystem} occurs
*/
- protected void writeProperties(NPOIFSFileSystem outFS, List<String> writtenEntries) throws IOException {
+ protected void writeProperties(POIFSFileSystem outFS, List<String> writtenEntries) throws IOException {
final EncryptionInfo ei = getEncryptionInfo();
final boolean encryptProps = (ei != null && ei.isDocPropsEncrypted());
- try (NPOIFSFileSystem tmpFS = new NPOIFSFileSystem()) {
- final NPOIFSFileSystem fs = (encryptProps) ? tmpFS : outFS;
+ try (POIFSFileSystem tmpFS = new POIFSFileSystem()) {
+ final POIFSFileSystem fs = (encryptProps) ? tmpFS : outFS;
writePropertySet(SummaryInformation.DEFAULT_STREAM_NAME, getSummaryInformation(), fs, writtenEntries);
writePropertySet(DocumentSummaryInformation.DEFAULT_STREAM_NAME, getDocumentSummaryInformation(), fs, writtenEntries);
}
}
- private void writePropertySet(String name, PropertySet ps, NPOIFSFileSystem outFS, List<String> writtenEntries)
+ private void writePropertySet(String name, PropertySet ps, POIFSFileSystem outFS, List<String> writtenEntries)
throws IOException {
if (ps == null) {
return;
- * @param outFS the NPOIFSFileSystem to write the property into
+ * @param outFS the POIFSFileSystem to write the property into
*
* @throws IOException if an error when writing to the
- * {@link NPOIFSFileSystem} occurs
+ * {@link POIFSFileSystem} occurs
*/
- private void writePropertySet(String name, PropertySet set, NPOIFSFileSystem outFS) throws IOException {
+ private void writePropertySet(String name, PropertySet set, POIFSFileSystem outFS) throws IOException {
try {
PropertySet mSet = new PropertySet(set);
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
public abstract void write(OutputStream out) throws IOException;
/**
- * Closes the underlying {@link NPOIFSFileSystem} from which
+ * Closes the underlying {@link POIFSFileSystem} from which
* the document was read, if any. Has no effect on documents
* opened from an InputStream, or newly created ones.<p>
*
@Internal
protected boolean initDirectory() {
if (directory == null) {
- directory = new NPOIFSFileSystem().getRoot(); // NOSONAR
+ directory = new POIFSFileSystem().getRoot(); // NOSONAR
return true;
}
return false;
import java.io.OutputStream;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
protected POIReadOnlyDocument(DirectoryNode dir) {
super(dir);
}
- protected POIReadOnlyDocument(NPOIFSFileSystem fs) {
+ protected POIReadOnlyDocument(POIFSFileSystem fs) {
super(fs);
}
import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
* <p>Note 3 - rather than using this, for most cases you would be better
* off switching to <a href="http://tika.apache.org">Apache Tika</a> instead!</p>
*/
-@SuppressWarnings("WeakerAccess")
+@SuppressWarnings({"WeakerAccess", "JavadocReference"})
public final class OLE2ExtractorFactory {
private static final POILogger LOGGER = POILogFactory.getLogger(OLE2ExtractorFactory.class);
public static <T extends POITextExtractor> T createExtractor(POIFSFileSystem fs) throws IOException {
return (T)createExtractor(fs.getRoot());
}
- @SuppressWarnings("unchecked")
- public static <T extends POITextExtractor> T createExtractor(NPOIFSFileSystem fs) throws IOException {
- return (T)createExtractor(fs.getRoot());
- }
@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(InputStream input) throws IOException {
}
} else {
// Best hope it's OLE2....
- return createExtractor(new NPOIFSFileSystem(input));
+ return createExtractor(new POIFSFileSystem(input));
}
}
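Illustrative sketch (not part of this patch): typical use of the remaining factory entry points; which concrete POITextExtractor comes back depends on the modules on the classpath, and the file name is a placeholder.

public static String extractText(File olefile) throws IOException {
    try (POIFSFileSystem fs = new POIFSFileSystem(olefile, true);
         POITextExtractor extractor = OLE2ExtractorFactory.createExtractor(fs)) {
        return extractor.getText();
    }
}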
import org.apache.poi.POIDocument;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.FilteringDirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
* without affecting the rest of the file
*/
public class HPSFPropertiesOnlyDocument extends POIDocument {
- public HPSFPropertiesOnlyDocument(NPOIFSFileSystem fs) {
- super(fs.getRoot());
- }
public HPSFPropertiesOnlyDocument(POIFSFileSystem fs) {
super(fs);
}
* Write out to the currently open file the properties changes, but nothing else
*/
public void write() throws IOException {
- NPOIFSFileSystem fs = getDirectory().getFileSystem();
+ POIFSFileSystem fs = getDirectory().getFileSystem();
validateInPlaceWritePossible();
writeProperties(fs, null);
* Write out, with any properties changes, but nothing else
*/
public void write(OutputStream out) throws IOException {
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
+ try (POIFSFileSystem fs = new POIFSFileSystem()) {
write(fs);
fs.writeFilesystem(out);
}
}
- private void write(NPOIFSFileSystem fs) throws IOException {
+ private void write(POIFSFileSystem fs) throws IOException {
// For tracking what we've written out, so far
List<String> excepts = new ArrayList<>(2);
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hpsf.wellknown.PropertyIDMap;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
public HPSFPropertiesExtractor(POIFSFileSystem fs) {
super(new HPSFPropertiesOnlyDocument(fs));
}
- public HPSFPropertiesExtractor(NPOIFSFileSystem fs) {
- super(new HPSFPropertiesOnlyDocument(fs));
- }
public String getDocumentSummaryInformationText() {
if(document == null) { // event based extractor does not have a document
public static void main(String[] args) throws IOException {
for (String file : args) {
try (HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(
- new NPOIFSFileSystem(new File(file)))) {
+ new POIFSFileSystem(new File(file)))) {
System.out.println(ext.getText());
}
}
import java.io.DataInputStream;
import java.io.File;
-import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.poi.hssf.record.pivottable.ViewFieldsRecord;
import org.apache.poi.hssf.record.pivottable.ViewSourceRecord;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
* @param recListener the record listener to notify about read records
* @param dumpInterpretedRecords if {@code true}, the read records will be written to the PrintWriter
*
- * @return an array of Records created from the InputStream
* @exception org.apache.poi.util.RecordFormatException on error processing the InputStream
*/
- public static Record[] createRecords(InputStream is, PrintWriter ps, BiffRecordListener recListener, boolean dumpInterpretedRecords)
+ private static void createRecords(InputStream is, PrintWriter ps, BiffRecordListener recListener, boolean dumpInterpretedRecords)
throws org.apache.poi.util.RecordFormatException {
- List<Record> temp = new ArrayList<>();
-
RecordInputStream recStream = new RecordInputStream(is);
while (true) {
boolean hasNext;
if (record.getSid() == ContinueRecord.sid) {
continue;
}
- temp.add(record);
for (String header : recListener.getRecentHeaders()) {
ps.println(header);
}
ps.println();
}
- Record[] result = new Record[temp.size()];
- temp.toArray(result);
- return result;
}
}
return new CommandArgs(biffhex, noint, out, rawhex, noheader, file);
}
- public boolean shouldDumpBiffHex() {
+ boolean shouldDumpBiffHex() {
return _biffhex;
}
- public boolean shouldDumpRecordInterpretations() {
+ boolean shouldDumpRecordInterpretations() {
return !_noint;
}
- public boolean shouldOutputToFile() {
+ boolean shouldOutputToFile() {
return _out;
}
- public boolean shouldOutputRawHexOnly() {
+ boolean shouldOutputRawHexOnly() {
return _rawhex;
}
- public boolean suppressHeader() {
+ boolean suppressHeader() {
return _noHeader;
}
public File getFile() {
}
}
private static final class CommandParseException extends Exception {
- public CommandParseException(String msg) {
+ CommandParseException(String msg) {
super(msg);
}
}
pw = new PrintWriter(new OutputStreamWriter(System.out, Charset.defaultCharset()));
}
- NPOIFSFileSystem fs = null;
+ POIFSFileSystem fs = null;
InputStream is = null;
try {
- fs = new NPOIFSFileSystem(cmdArgs.getFile(), true);
+ fs = new POIFSFileSystem(cmdArgs.getFile(), true);
is = getPOIFSInputStream(fs);
if (cmdArgs.shouldOutputRawHexOnly()) {
}
}
- protected static InputStream getPOIFSInputStream(NPOIFSFileSystem fs)
- throws IOException, FileNotFoundException {
+ static InputStream getPOIFSInputStream(POIFSFileSystem fs) throws IOException {
String workbookName = HSSFWorkbook.getWorkbookDirEntryName(fs.getRoot());
return fs.createDocumentInputStream(workbookName);
}
- protected static void runBiffViewer(PrintWriter pw, InputStream is,
+ static void runBiffViewer(PrintWriter pw, InputStream is,
boolean dumpInterpretedRecords, boolean dumpHex, boolean zeroAlignHexDump,
boolean suppressHeader) {
BiffRecordListener recListener = new BiffRecordListener(dumpHex ? pw : null, zeroAlignHexDump, suppressHeader);
private List<String> _headers;
private final boolean _zeroAlignEachRecord;
private final boolean _noHeader;
- public BiffRecordListener(Writer hexDumpWriter, boolean zeroAlignEachRecord, boolean noHeader) {
+ private BiffRecordListener(Writer hexDumpWriter, boolean zeroAlignEachRecord, boolean noHeader) {
_hexDumpWriter = hexDumpWriter;
_zeroAlignEachRecord = zeroAlignEachRecord;
_noHeader = noHeader;
}
}
}
- public List<String> getRecentHeaders() {
+ private List<String> getRecentHeaders() {
List<String> result = _headers;
_headers = new ArrayList<>();
return result;
private int _currentSize;
private boolean _innerHasReachedEOF;
- public BiffDumpingStream(InputStream is, IBiffRecordListener listener) {
+ private BiffDumpingStream(InputStream is, IBiffRecordListener listener) {
_is = new DataInputStream(is);
_listener = listener;
_data = new byte[RecordInputStream.MAX_RECORD_DATA_SIZE + 4];
* @param globalOffset (somewhat arbitrary) used to calculate the addresses printed at the
* start of each line
*/
- static void hexDumpAligned(Writer w, byte[] data, int dumpLen, int globalOffset,
+ private static void hexDumpAligned(Writer w, byte[] data, int dumpLen, int globalOffset,
boolean zeroAlignEachRecord) {
int baseDataOffset = 0;
return ib;
}
- private static void writeHex(char buf[], int startInBuf, int value, int nDigits) throws IOException {
+ private static void writeHex(char buf[], int startInBuf, int value, int nDigits) {
int acc = value;
for(int i=nDigits-1; i>=0; i--) {
int digit = acc & 0x0F;
import java.io.InputStream;
import org.apache.poi.hssf.eventusermodel.HSSFEventFactory;
-import org.apache.poi.hssf.eventusermodel.HSSFListener;
import org.apache.poi.hssf.eventusermodel.HSSFRequest;
-import org.apache.poi.hssf.record.Record;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
*
/** Creates a new instance of EFBiffViewer */
- public EFBiffViewer()
- {
+ @SuppressWarnings("WeakerAccess")
+ public EFBiffViewer() {
}
public void run() throws IOException {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(file), true);
- try {
- InputStream din = BiffViewer.getPOIFSInputStream(fs);
- try {
- HSSFRequest req = new HSSFRequest();
-
- req.addListenerForAllRecords(new HSSFListener()
- {
- public void processRecord(Record rec)
- {
- System.out.println(rec);
- }
- });
- HSSFEventFactory factory = new HSSFEventFactory();
-
- factory.processEvents(req, din);
- } finally {
- din.close();
- }
- } finally {
- fs.close();
+ try (POIFSFileSystem fs = new POIFSFileSystem(new File(file), true);
+ InputStream din = BiffViewer.getPOIFSInputStream(fs)) {
+ HSSFRequest req = new HSSFRequest();
+
+ req.addListenerForAllRecords(System.out::println);
+ HSSFEventFactory factory = new HSSFEventFactory();
+
+ factory.processEvents(req, din);
}
}
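Illustrative sketch (not part of this patch): the same event API can be narrowed to a single record type instead of addListenerForAllRecords. The hard-coded "Workbook" entry name is an assumption; the patch itself resolves the correct name via BiffViewer.getPOIFSInputStream.

public static void printBofRecords(File xls) throws IOException {
    try (POIFSFileSystem fs = new POIFSFileSystem(xls, true);
         InputStream din = fs.createDocumentInputStream("Workbook")) {
        HSSFRequest req = new HSSFRequest();
        // Only BOF records reach this listener; all other sids are skipped.
        req.addListener(rec -> System.out.println(rec), BOFRecord.sid);
        new HSSFEventFactory().processEvents(req, din);
    }
}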
import org.apache.poi.hssf.record.FormulaRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordFactory;
-import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.formula.ptg.ExpPtg;
import org.apache.poi.ss.formula.ptg.FuncPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
* @throws IOException if the file contained errors
*/
public void run() throws IOException {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(file), true);
+ POIFSFileSystem fs = new POIFSFileSystem(new File(file), true);
try {
InputStream is = BiffViewer.getPOIFSInputStream(fs);
try {
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordFactory;
import org.apache.poi.hssf.record.RecordInputStream;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
* This is a low-level debugging class, which simply prints
{
}
- public void run()
- throws IOException
- {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(file), true);
- try {
- InputStream din = BiffViewer.getPOIFSInputStream(fs);
- try {
- RecordInputStream rinp = new RecordInputStream(din);
-
- while(rinp.hasNextRecord()) {
- int sid = rinp.getNextSid();
- rinp.nextRecord();
-
- int size = rinp.available();
- Class<? extends Record> clz = RecordFactory.getRecordClass(sid);
-
- System.out.print(
- formatSID(sid) +
- " - " +
- formatSize(size) +
- " bytes"
- );
- if(clz != null) {
- System.out.print(" \t");
- System.out.print(clz.getName().replace("org.apache.poi.hssf.record.", ""));
- }
- System.out.println();
-
- byte[] data = rinp.readRemainder();
- if(data.length > 0) {
- System.out.print(" ");
- System.out.println( formatData(data) );
- }
+ public void run() throws IOException {
+ try (POIFSFileSystem fs = new POIFSFileSystem(new File(file), true);
+ InputStream din = BiffViewer.getPOIFSInputStream(fs)) {
+ RecordInputStream rinp = new RecordInputStream(din);
+
+ while (rinp.hasNextRecord()) {
+ int sid = rinp.getNextSid();
+ rinp.nextRecord();
+
+ int size = rinp.available();
+ Class<? extends Record> clz = RecordFactory.getRecordClass(sid);
+
+ System.out.print(
+ formatSID(sid) +
+ " - " +
+ formatSize(size) +
+ " bytes"
+ );
+ if (clz != null) {
+ System.out.print(" \t");
+ System.out.print(clz.getName().replace("org.apache.poi.hssf.record.", ""));
+ }
+ System.out.println();
+
+ byte[] data = rinp.readRemainder();
+ if (data.length > 0) {
+ System.out.print(" ");
+ System.out.println(formatData(data));
}
- } finally {
- din.close();
}
- } finally {
- fs.close();
}
}
String hex = Integer.toHexString(sid);
String dec = Integer.toString(sid);
- StringBuffer s = new StringBuffer();
+ StringBuilder s = new StringBuilder();
s.append("0x");
for(int i=hex.length(); i<4; i++) {
s.append('0');
String hex = Integer.toHexString(size);
String dec = Integer.toString(size);
- StringBuffer s = new StringBuffer();
+ StringBuilder s = new StringBuilder();
for(int i=hex.length(); i<3; i++) {
s.append('0');
}
return "";
// If possible, do first 4 and last 4 bytes
- StringBuffer s = new StringBuffer();
+ StringBuilder s = new StringBuilder();
if(data.length > 9) {
s.append(byteToHex(data[0]));
s.append(' ');
s.append(' ');
s.append(byteToHex(data[data.length-1]));
} else {
- for(int i=0; i<data.length; i++) {
- s.append(byteToHex(data[i]));
- s.append(' ');
- }
+ for (byte aData : data) {
+ s.append(byteToHex(aData));
+ s.append(' ');
+ }
}
return s.toString();
}
public OldExcelExtractor(File f) throws IOException {
- NPOIFSFileSystem poifs = null;
+ POIFSFileSystem poifs = null;
try {
- poifs = new NPOIFSFileSystem(f);
+ poifs = new POIFSFileSystem(f);
open(poifs);
toClose = poifs;
return;
}
}
- public OldExcelExtractor(NPOIFSFileSystem fs) throws IOException {
+ public OldExcelExtractor(POIFSFileSystem fs) throws IOException {
open(fs);
}
: new BufferedInputStream(biffStream, 8);
if (FileMagic.valueOf(bis) == FileMagic.OLE2) {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(bis);
+ POIFSFileSystem poifs = new POIFSFileSystem(bis);
try {
open(poifs);
toClose = poifs; // Fixed by GR, we should not close it here
}
}
- private void open(NPOIFSFileSystem fs) throws IOException {
+ private void open(POIFSFileSystem fs) throws IOException {
open(fs.getRoot());
}
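Illustrative sketch (not part of this patch): typical use of OldExcelExtractor over the POIFSFileSystem-backed constructors above, assuming OldExcelExtractor implements Closeable as in recent POI versions; the file name is a placeholder.

public static String extractOldBiffText(File legacyXls) throws IOException {
    try (OldExcelExtractor extractor = new OldExcelExtractor(legacyXls)) {
        System.out.println("BIFF version: " + extractor.getBiffVersion());
        return extractor.getText();
    }
}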
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.FilteringDirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSDocument;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
-import org.apache.poi.poifs.filesystem.Ole10Native;
+import org.apache.poi.poifs.filesystem.POIFSDocument;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
+import org.apache.poi.poifs.filesystem.Ole10Native;
import org.apache.poi.ss.SpreadsheetVersion;
import org.apache.poi.ss.formula.FormulaShifter;
import org.apache.poi.ss.formula.FormulaType;
* @see org.apache.poi.hssf.model.InternalWorkbook
* @see org.apache.poi.hssf.usermodel.HSSFSheet
*/
+@SuppressWarnings("WeakerAccess")
public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss.usermodel.Workbook {
//arbitrarily selected; may need to increase
public HSSFWorkbook(POIFSFileSystem fs) throws IOException {
this(fs,true);
}
- /**
- * Given a POI POIFSFileSystem object, read in its Workbook along
- * with all related nodes, and populate the high and low level models.
- * <p>This calls {@link #HSSFWorkbook(POIFSFileSystem, boolean)} with
- * preserve nodes set to true.
- *
- * @see #HSSFWorkbook(POIFSFileSystem, boolean)
- * @see org.apache.poi.poifs.filesystem.POIFSFileSystem
- * @exception IOException if the stream cannot be read
- */
- public HSSFWorkbook(NPOIFSFileSystem fs) throws IOException {
- this(fs.getRoot(),true);
- }
/**
* Given a POI POIFSFileSystem object, read in its Workbook and populate
public HSSFWorkbook(InputStream s, boolean preserveNodes)
throws IOException
{
- this(new NPOIFSFileSystem(s).getRoot(), preserveNodes);
+ this(new POIFSFileSystem(s).getRoot(), preserveNodes);
}
/**
HSSFName getBuiltInName(byte builtinCode, int sheetIndex) {
int index = findExistingBuiltinNameRecordIdx(sheetIndex, builtinCode);
- if (index < 0) {
- return null;
- } else {
- return names.get(index);
- }
+ return (index < 0) ? null : names.get(index);
}
// So we don't confuse users, give them back
// the same object every time, but create
// them lazily
- Integer sIdx = Integer.valueOf(idx);
+ Integer sIdx = idx;
if(fonts.containsKey(sIdx)) {
return fonts.get(sIdx);
}
* Should only be called after deleting fonts,
* and that's not something you should normally do
*/
- protected void resetFontCache() {
+ void resetFontCache() {
fonts = new HashMap<>();
}
}
/**
- * Closes the underlying {@link NPOIFSFileSystem} from which
+ * Closes the underlying {@link POIFSFileSystem} from which
* the Workbook was read, if any.
*
* <p>Once this has been called, no further
// Update the Workbook stream in the file
DocumentNode workbookNode = (DocumentNode)dir.getEntry(
getWorkbookDirEntryName(dir));
- NPOIFSDocument workbookDoc = new NPOIFSDocument(workbookNode);
+ POIFSDocument workbookDoc = new POIFSDocument(workbookNode);
workbookDoc.replaceContents(new ByteArrayInputStream(getBytes()));
// Update the properties streams in the file
*/
@Override
public void write(OutputStream stream) throws IOException {
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
+ try (POIFSFileSystem fs = new POIFSFileSystem()) {
write(fs);
fs.writeFilesystem(stream);
}
}
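Illustrative sketch (not part of this patch): from the caller's side nothing changes, write(OutputStream) still snapshots the workbook into a fresh in-memory filesystem; names and cell contents are placeholders.

public static void writeSimpleWorkbook(File target) throws IOException {
    try (HSSFWorkbook wb = new HSSFWorkbook();
         OutputStream out = new FileOutputStream(target)) {
        wb.createSheet("Data").createRow(0).createCell(0).setCellValue("hello");
        wb.write(out);   // builds and serialises a temporary POIFSFileSystem internally
    }
}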
/** Writes the workbook out to a brand new, empty POIFS */
- private void write(NPOIFSFileSystem fs) throws IOException {
+ private void write(POIFSFileSystem fs) throws IOException {
// For tracking what we've written out, used if we're
// going to be preserving nodes
List<String> excepts = new ArrayList<>(1);
}
@SuppressWarnings("resource")
- protected void encryptBytes(byte buf[]) {
+ void encryptBytes(byte buf[]) {
EncryptionInfo ei = getEncryptionInfo();
if (ei == null) {
return;
ChunkedCipherOutputStream os = enc.getDataStream(leos, initialOffset);
int totalBytes = 0;
while (totalBytes < buf.length) {
- plain.read(tmp, 0, 4);
+ IOUtils.readFully(plain, tmp, 0, 4);
final int sid = LittleEndian.getUShort(tmp, 0);
final int len = LittleEndian.getUShort(tmp, 2);
boolean isPlain = Biff8DecryptingStream.isNeverEncryptedRecord(sid);
/**
* Spits out a list of all the drawing records in the workbook.
*/
- public void dumpDrawingGroupRecords(boolean fat)
- {
+ public void dumpDrawingGroupRecords(boolean fat) {
DrawingGroupRecord r = (DrawingGroupRecord) workbook.findFirstRecordBySid( DrawingGroupRecord.sid );
+ if (r == null) {
+ return;
+ }
r.decode();
List<EscherRecord> escherRecords = r.getEscherRecords();
PrintWriter w = new PrintWriter(new OutputStreamWriter(System.out, Charset.defaultCharset()));
}
- protected static Map<String,ClassID> getOleMap() {
+ static Map<String,ClassID> getOleMap() {
Map<String,ClassID> olemap = new HashMap<>();
olemap.put("PowerPoint Document", ClassIDPredefined.POWERPOINT_V8.getClassID());
for (String str : WORKBOOK_DIR_ENTRY_NAMES) {
import java.io.IOException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.apache.poi.util.Internal;
* Helper class which is instantiated by reflection from
* {@link WorkbookFactory#create(java.io.File)} and similar
*/
+@SuppressWarnings("unused")
@Internal
public class HSSFWorkbookFactory extends WorkbookFactory {
/**
* Note that in order to properly release resources the
* Workbook should be closed after use.
*/
- public static HSSFWorkbook createWorkbook(final NPOIFSFileSystem fs) throws IOException {
+ public static HSSFWorkbook createWorkbook(final POIFSFileSystem fs) throws IOException {
return new HSSFWorkbook(fs);
}
@Override
public void processPOIFSWriterEvent(POIFSWriterEvent event) {
try {
- OutputStream os = event.getStream();
-
- // StreamSize (8 bytes): An unsigned integer that specifies the number of bytes used by data
- // encrypted within the EncryptedData field, not including the size of the StreamSize field.
- // Note that the actual size of the \EncryptedPackage stream (1) can be larger than this
- // value, depending on the block size of the chosen encryption algorithm
- byte buf[] = new byte[LittleEndianConsts.LONG_SIZE];
- LittleEndian.putLong(buf, 0, pos);
- os.write(buf);
-
- FileInputStream fis = new FileInputStream(fileOut);
- try {
+ try (OutputStream os = event.getStream();
+ FileInputStream fis = new FileInputStream(fileOut)) {
+
+ // StreamSize (8 bytes): An unsigned integer that specifies the number of bytes used by data
+ // encrypted within the EncryptedData field, not including the size of the StreamSize field.
+ // Note that the actual size of the \EncryptedPackage stream (1) can be larger than this
+ // value, depending on the block size of the chosen encryption algorithm
+ byte buf[] = new byte[LittleEndianConsts.LONG_SIZE];
+ LittleEndian.putLong(buf, 0, pos);
+ os.write(buf);
+
IOUtils.copy(fis, os);
- } finally {
- fis.close();
}
- os.close();
-
if (!fileOut.delete()) {
LOG.log(POILogger.ERROR, "Can't delete temporary encryption file: "+fileOut);
}
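Illustrative sketch (not part of this patch): how a POIFSWriterListener such as the one above is typically wired up; the stream name and payload are placeholders.

public static void writeStreamLazily(POIFSFileSystem fs, byte[] payload) throws IOException {
    // The listener is called back with a size-limited DocumentOutputStream and must
    // supply exactly the declared number of bytes.
    fs.getRoot().createDocument("MyStream", payload.length, event -> {
        try {
            event.getStream().write(payload);
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
    });
}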
import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
public abstract class Decryptor implements Cloneable {
return d;
}
- public InputStream getDataStream(NPOIFSFileSystem fs) throws IOException, GeneralSecurityException {
- return getDataStream(fs.getRoot());
- }
-
public InputStream getDataStream(POIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}
-
+
// for tests
public byte[] getVerifier() {
return verifier;
import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
this(fs.getRoot());
}
- /**
- * Opens for decryption
- */
- public EncryptionInfo(NPOIFSFileSystem fs) throws IOException {
- this(fs.getRoot());
- }
-
/**
* Opens for decryption
*/
* @throws IllegalAccessException if the builder class can't be loaded
* @throws InstantiationException if the builder class can't be loaded
*/
- @SuppressWarnings("WeakerAccess")
+ @SuppressWarnings({"WeakerAccess", "JavadocReference"})
protected static EncryptionInfoBuilder getBuilder(EncryptionMode encryptionMode)
throws ClassNotFoundException, IllegalAccessException, InstantiationException {
ClassLoader cl = EncryptionInfo.class.getClassLoader();
import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
public abstract class Encryptor implements Cloneable {
return info.getEncryptor();
}
- public OutputStream getDataStream(NPOIFSFileSystem fs) throws IOException, GeneralSecurityException {
- return getDataStream(fs.getRoot());
- }
public OutputStream getDataStream(POIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}
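Illustrative sketch (not part of this patch): decrypting the EncryptedPackage of a password-protected OOXML file stored in an OLE2 container, using only the POIFSFileSystem overloads that remain; file name and password are placeholders.

public static InputStream openEncrypted(File protectedFile, String password)
        throws IOException, GeneralSecurityException {
    POIFSFileSystem fs = new POIFSFileSystem(protectedFile, true);
    EncryptionInfo info = new EncryptionInfo(fs);
    Decryptor decryptor = Decryptor.getInstance(info);
    if (!decryptor.verifyPassword(password)) {
        fs.close();
        throw new EncryptedDocumentException("wrong password");
    }
    // The caller closes both the returned stream and, afterwards, the filesystem.
    return decryptor.getDataStream(fs);
}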
import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.crypt.ChunkedCipherOutputStream;
import org.apache.poi.poifs.crypt.CryptoFunctions;
-import org.apache.poi.poifs.crypt.DataSpaceMapUtils;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.crypt.Encryptor;
import org.apache.poi.poifs.crypt.HashAlgorithm;
import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIDecryptor.StreamDescriptorEntry;
-import org.apache.poi.poifs.crypt.standard.EncryptionRecord;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.Entry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
-import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.StringUtil;
public class CryptoAPIEncryptor extends Encryptor implements Cloneable {
private int chunkSize = 512;
- protected CryptoAPIEncryptor() {
+ CryptoAPIEncryptor() {
}
@Override
* @param cipher may be null, otherwise the given instance is reset to the new block index
* @param block the block index, e.g. the persist/slide id (hslf)
* @return a new cipher object, if cipher was null, otherwise the reinitialized cipher
- * @throws GeneralSecurityException
+ * @throws GeneralSecurityException when the cipher can't be initialized
*/
public Cipher initCipherForBlock(Cipher cipher, int block)
throws GeneralSecurityException {
}
@Override
- public ChunkedCipherOutputStream getDataStream(DirectoryNode dir)
- throws IOException, GeneralSecurityException {
+ public ChunkedCipherOutputStream getDataStream(DirectoryNode dir) throws IOException {
throw new IOException("not supported");
}
*
* @see <a href="http://msdn.microsoft.com/en-us/library/dd943321(v=office.12).aspx">2.3.5.4 RC4 CryptoAPI Encrypted Summary Stream</a>
*/
- public void setSummaryEntries(DirectoryNode dir, String encryptedStream, NPOIFSFileSystem entries)
+ public void setSummaryEntries(DirectoryNode dir, String encryptedStream, POIFSFileSystem entries)
throws IOException, GeneralSecurityException {
CryptoAPIDocumentOutputStream bos = new CryptoAPIDocumentOutputStream(this); // NOSONAR
byte buf[] = new byte[8];
dir.createDocument(encryptedStream, new ByteArrayInputStream(bos.getBuf(), 0, savedSize));
}
- protected int getKeySizeInBytes() {
- return getEncryptionInfo().getHeader().getKeySize() / 8;
- }
+// protected int getKeySizeInBytes() {
+// return getEncryptionInfo().getHeader().getKeySize() / 8;
+// }
@Override
public void setChunkSize(int chunkSize) {
this.chunkSize = chunkSize;
}
- protected void createEncryptionInfoEntry(DirectoryNode dir) throws IOException {
- DataSpaceMapUtils.addDefaultDataSpace(dir);
- final EncryptionInfo info = getEncryptionInfo();
- final CryptoAPIEncryptionHeader header = (CryptoAPIEncryptionHeader)getEncryptionInfo().getHeader();
- final CryptoAPIEncryptionVerifier verifier = (CryptoAPIEncryptionVerifier)getEncryptionInfo().getVerifier();
- EncryptionRecord er = new EncryptionRecord() {
- @Override
- public void write(LittleEndianByteArrayOutputStream bos) {
- bos.writeShort(info.getVersionMajor());
- bos.writeShort(info.getVersionMinor());
- header.write(bos);
- verifier.write(bos);
- }
- };
- DataSpaceMapUtils.createEncryptionEntry(dir, "EncryptionInfo", er);
- }
-
-
@Override
public CryptoAPIEncryptor clone() throws CloneNotSupportedException {
return (CryptoAPIEncryptor)super.clone();
}
@Override
- protected void createEncryptionInfoEntry(DirectoryNode dir, File tmpFile)
- throws IOException, GeneralSecurityException {
+ protected void createEncryptionInfoEntry(DirectoryNode dir, File tmpFile) {
throw new EncryptedDocumentException("createEncryptionInfoEntry not supported");
}
- public CryptoAPICipherOutputStream(OutputStream stream)
+ CryptoAPICipherOutputStream(OutputStream stream)
throws IOException, GeneralSecurityException {
super(stream, CryptoAPIEncryptor.this.chunkSize);
}
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;
+import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.crypt.ChunkedCipherOutputStream;
import org.apache.poi.poifs.crypt.CryptoFunctions;
import org.apache.poi.poifs.crypt.Encryptor;
// chunkSize is irrelevant
}
- protected void createEncryptionInfoEntry(DirectoryNode dir) throws IOException {
- }
-
@Override
public XOREncryptor clone() throws CloneNotSupportedException {
return (XOREncryptor)super.clone();
}
@Override
- protected void createEncryptionInfoEntry(DirectoryNode dir, File tmpFile)
- throws IOException, GeneralSecurityException {
- XOREncryptor.this.createEncryptionInfoEntry(dir);
+ protected void createEncryptionInfoEntry(DirectoryNode dir, File tmpFile) {
+ throw new EncryptedDocumentException("createEncryptionInfoEntry not supported");
}
@Override
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
-import org.apache.poi.poifs.filesystem.NPOIFSStream;
-import org.apache.poi.poifs.property.NPropertyTable;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSStream;
+import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.util.IOUtils;
/**
* Dump internal structure of a OLE2 file into file system
*/
-public class POIFSDump {
-
+public final class POIFSDump {
//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;
+ private POIFSDump() {}
+
public static void main(String[] args) throws IOException {
if (args.length == 0) {
System.err.println("Must specify at least one file to dump");
}
System.out.println("Dumping " + filename);
- FileInputStream is = new FileInputStream(filename);
- NPOIFSFileSystem fs;
- try {
- fs = new NPOIFSFileSystem(is);
- } finally {
- is.close();
- }
- try {
+ try (FileInputStream is = new FileInputStream(filename);
+ POIFSFileSystem fs = new POIFSFileSystem(is)) {
DirectoryEntry root = fs.getRoot();
String filenameWithoutPath = new File(filename).getName();
File dumpDir = new File(filenameWithoutPath + "_dump");
dump(fs, header.getPropertyStart(), "properties", file);
}
if (dumpMini) {
- NPropertyTable props = fs.getPropertyTable();
+ PropertyTable props = fs.getPropertyTable();
int startBlock = props.getRoot().getStartBlock();
if (startBlock == POIFSConstants.END_OF_CHAIN) {
System.err.println("No Mini Stream in file");
dump(fs, startBlock, "mini-stream", file);
}
}
- } finally {
- fs.close();
}
}
}
byte[] bytes = IOUtils.toByteArray(is);
is.close();
- OutputStream out = new FileOutputStream(new File(parent, node.getName().trim()));
- try {
- out.write(bytes);
- } finally {
- out.close();
+ try (OutputStream out = new FileOutputStream(new File(parent, node.getName().trim()))) {
+ out.write(bytes);
}
} else if (entry instanceof DirectoryEntry){
DirectoryEntry dir = (DirectoryEntry)entry;
}
}
}
- public static void dump(NPOIFSFileSystem fs, int startBlock, String name, File parent) throws IOException {
+ public static void dump(POIFSFileSystem fs, int startBlock, String name, File parent) throws IOException {
File file = new File(parent, name);
- FileOutputStream out = new FileOutputStream(file);
- try {
- NPOIFSStream stream = new NPOIFSStream(fs, startBlock);
+ try (FileOutputStream out = new FileOutputStream(file)) {
+ POIFSStream stream = new POIFSStream(fs, startBlock);
byte[] b = IOUtils.safelyAllocate(fs.getBigBlockSize(), MAX_RECORD_LENGTH);
for (ByteBuffer bb : stream) {
bb.get(b);
out.write(b, 0, len);
}
- } finally {
- out.close();
}
}
}
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
boolean withSizes = false;
boolean newPOIFS = true;
- for (int j = 0; j < args.length; j++) {
- if (args[j].equalsIgnoreCase("-size") || args[j].equalsIgnoreCase("-sizes")) {
+ for (String arg : args) {
+ if (arg.equalsIgnoreCase("-size") || arg.equalsIgnoreCase("-sizes")) {
withSizes = true;
- } else if (args[j].equalsIgnoreCase("-old") || args[j].equalsIgnoreCase("-old-poifs")) {
+ } else if (arg.equalsIgnoreCase("-old") || arg.equalsIgnoreCase("-old-poifs")) {
newPOIFS = false;
} else {
- if(newPOIFS) {
- viewFile(args[j], withSizes);
+ if (newPOIFS) {
+ viewFile(arg, withSizes);
} else {
- viewFileOld(args[j], withSizes);
+ viewFileOld(arg, withSizes);
}
}
}
}
public static void viewFile(final String filename, boolean withSizes) throws IOException {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(filename));
+ POIFSFileSystem fs = new POIFSFileSystem(new File(filename));
displayDirectory(fs.getRoot(), "", withSizes);
fs.close();
}
import java.io.IOException;
import java.util.List;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
* A simple viewer for POIFS files
* @author Marc Johnson (mjohnson at apache dot org)
*/
-public class POIFSViewer
-{
+public final class POIFSViewer {
+
+ private POIFSViewer() {}
/**
* Display the contents of multiple POIFS files
}
boolean printNames = (args.length > 1);
- for (int j = 0; j < args.length; j++)
- {
- viewFile(args[ j ], printNames);
+ for (String arg : args) {
+ viewFile(arg, printNames);
}
}
System.out.println(flowerbox);
}
try {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(filename));
+ POIFSFileSystem fs = new POIFSFileSystem(new File(filename));
List<String> strings = POIFSViewEngine.inspectViewable(fs, true, 0, " ");
for (String s : strings) {
System.out.print(s);
System.out.println(e.getMessage());
}
}
-} // end public class POIFSViewer
-
+}
\ No newline at end of file
import java.io.InputStream;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
-import org.apache.poi.poifs.filesystem.NPOIFSDocument;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSDocument;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSDocumentPath;
import org.apache.poi.poifs.property.DirectoryProperty;
import org.apache.poi.poifs.property.DocumentProperty;
-import org.apache.poi.poifs.property.NPropertyTable;
+import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.poifs.property.Property;
import org.apache.poi.poifs.property.RootProperty;
import org.apache.poi.util.IOUtils;
*/
public void read(final InputStream stream) throws IOException {
- try (NPOIFSFileSystem poifs = new NPOIFSFileSystem(stream)) {
+ try (POIFSFileSystem poifs = new POIFSFileSystem(stream)) {
read(poifs);
}
}
* @exception IOException on errors reading, or on invalid data
*/
public void read(final File poifsFile) throws IOException {
- try (NPOIFSFileSystem poifs = new NPOIFSFileSystem(poifsFile, true)) {
+ try (POIFSFileSystem poifs = new POIFSFileSystem(poifsFile, true)) {
read(poifs);
}
}
*
* @exception IOException on errors reading, or on invalid data
*/
- public void read(final NPOIFSFileSystem poifs) throws IOException {
+ public void read(final POIFSFileSystem poifs) throws IOException {
registryClosed = true;
// get property table from the document
- NPropertyTable properties = poifs.getPropertyTable();
+ PropertyTable properties = poifs.getPropertyTable();
// process documents
RootProperty root = properties.getRoot();
}
}
- private void processProperties(final NPOIFSFileSystem poifs, DirectoryProperty dir, final POIFSDocumentPath path) {
+ private void processProperties(final POIFSFileSystem poifs, DirectoryProperty dir, final POIFSDocumentPath path) {
boolean hasChildren = false;
for (final Property property : dir) {
hasChildren = true;
POIFSDocumentPath new_path = new POIFSDocumentPath(path,new String[]{name});
processProperties(poifs, (DirectoryProperty) property, new_path);
} else {
- NPOIFSDocument document = null;
+ POIFSDocument document = null;
for (POIFSReaderListener rl : registry.getListeners(path, name)) {
if (document == null) {
- document = new NPOIFSDocument((DocumentProperty)property, poifs);
+ document = new POIFSDocument((DocumentProperty)property, poifs);
}
try (DocumentInputStream dis = new DocumentInputStream(document)) {
POIFSReaderEvent pe = new POIFSReaderEvent(dis, path, name);
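Illustrative sketch (not part of this patch): registering a listener for a single stream with the reworked POIFSReader; the stream name shown is the usual HPSF summary stream and the file name is a placeholder.

public static void printSummaryStreamSizes(File olefile) throws IOException {
    POIFSReader reader = new POIFSReader();
    // Fires once per matching document; the event carries a ready-to-use DocumentInputStream.
    reader.registerListener(
            event -> System.out.println(event.getName() + ": " + event.getStream().available() + " bytes"),
            "\005SummaryInformation");
    reader.read(olefile);
}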
private final ArrayList<Entry> _entries = new ArrayList<>();
-    // the NPOIFSFileSytem we belong to
+    // the POIFSFileSystem we belong to
- private final NPOIFSFileSystem _nfilesystem;
+ private final POIFSFileSystem _nfilesystem;
// the path described by this document
private final POIFSDocumentPath _path;
* @param parent the parent of this entry
*/
DirectoryNode(final DirectoryProperty property,
- final NPOIFSFileSystem nfilesystem,
+ final POIFSFileSystem nfilesystem,
final DirectoryNode parent)
{
super(property, parent);
/**
* @return the filesystem that this belongs to
*/
- public NPOIFSFileSystem getFileSystem()
+ public POIFSFileSystem getFileSystem()
{
return _nfilesystem;
}
-     * that this belong to, otherwise Null if OPOIFS based
+     * that this belongs to
* @return the filesystem that this belongs to
*/
- public NPOIFSFileSystem getNFileSystem()
+ public POIFSFileSystem getNFileSystem()
{
return _nfilesystem;
}
*
* @param document the document to be opened
*
- * @return a newly opened DocumentInputStream or NDocumentInputStream
+     * @return a newly opened DocumentInputStream
*
* @exception IOException if the document does not exist or the
* name is that of a DirectoryEntry
*
* @exception IOException if the document can't be created
*/
- DocumentEntry createDocument(final NPOIFSDocument document)
+ DocumentEntry createDocument(final POIFSDocument document)
throws IOException
{
DocumentProperty property = document.getDocumentProperty();
final InputStream stream)
throws IOException
{
- return createDocument(new NPOIFSDocument(name, _nfilesystem, stream));
+ return createDocument(new POIFSDocument(name, _nfilesystem, stream));
}
/**
final POIFSWriterListener writer)
throws IOException
{
- return createDocument(new NPOIFSDocument(name, size, _nfilesystem, writer));
+ return createDocument(new POIFSDocument(name, size, _nfilesystem, writer));
}
/**
return createDocument(name, stream);
} else {
DocumentNode existing = (DocumentNode)getEntry(name);
- NPOIFSDocument nDoc = new NPOIFSDocument(existing);
+ POIFSDocument nDoc = new POIFSDocument(existing);
nDoc.replaceContents(stream);
return existing;
}
* SlideShowFactory to combine common code here.
*/
@Internal
-public class DocumentFactoryHelper {
+public final class DocumentFactoryHelper {
+ private DocumentFactoryHelper() {
+ }
+
/**
-     * Wrap the OLE2 data in the NPOIFSFileSystem into a decrypted stream by using
+     * Wrap the OLE2 data of the given POIFSFileSystem into a decrypted stream by using
* the given password.
* @return A stream for reading the decrypted data
* @throws IOException If an error occurs while decrypting or if the password does not match
*/
- public static InputStream getDecryptedStream(final NPOIFSFileSystem fs, String password)
+ public static InputStream getDecryptedStream(final POIFSFileSystem fs, String password)
throws IOException {
-        // wrap the stream in a FilterInputStream to close the NPOIFSFileSystem
+        // wrap the stream in a FilterInputStream to close the POIFSFileSystem
// as well when the resulting OPCPackage is closed
package org.apache.poi.poifs.filesystem;
+import static org.apache.poi.util.LittleEndianConsts.INT_SIZE;
+import static org.apache.poi.util.LittleEndianConsts.LONG_SIZE;
+import static org.apache.poi.util.LittleEndianConsts.SHORT_SIZE;
+
import java.io.IOException;
import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+import org.apache.poi.poifs.property.DocumentProperty;
+import org.apache.poi.util.IOUtils;
+import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInput;
-import org.apache.poi.util.SuppressForbidden;
/**
* This class provides methods to read a DocumentEntry managed by a
- * {@link POIFSFileSystem} or {@link NPOIFSFileSystem} instance.
- * It creates the appropriate one, and delegates, allowing us to
- * work transparently with the two.
+ * {@link POIFSFileSystem} instance.
*/
-public class DocumentInputStream extends InputStream implements LittleEndianInput {
- /** returned by read operations if we're at end of document */
- protected static final int EOF = -1;
-
- private DocumentInputStream delegate;
-
- /** For use by downstream implementations */
- protected DocumentInputStream() {}
-
- /**
- * Create an InputStream from the specified DocumentEntry
- *
- * @param document the DocumentEntry to be read
- *
- * @exception IOException if the DocumentEntry cannot be opened (like, maybe it has
- * been deleted?)
- */
- public DocumentInputStream(DocumentEntry document) throws IOException {
- if (!(document instanceof DocumentNode)) {
- throw new IOException("Cannot open internal document storage");
- }
- delegate = new NDocumentInputStream(document);
- }
+public final class DocumentInputStream extends InputStream implements LittleEndianInput {
+ /** returned by read operations if we're at end of document */
+ private static final int EOF = -1;
- /**
- * Create an InputStream from the specified Document
- *
- * @param document the Document to be read
- */
- public DocumentInputStream(NPOIFSDocument document) {
- delegate = new NDocumentInputStream(document);
- }
+ /** current offset into the Document */
+ private int _current_offset;
+ /** current block count */
+ private int _current_block_count;
+
+ /** current marked offset into the Document (used by mark and reset) */
+ private int _marked_offset;
+ /** and the block count for it */
+ private int _marked_offset_count;
+
+ /** the Document's size */
+ private final int _document_size;
+
+ /** have we been closed? */
+ private boolean _closed;
+
+ /** the actual Document */
+ private final POIFSDocument _document;
+
+ private Iterator<ByteBuffer> _data;
+ private ByteBuffer _buffer;
+
+ /**
+ * Create an InputStream from the specified DocumentEntry
+ *
+ * @param document the DocumentEntry to be read
+ *
+ * @exception IOException if the DocumentEntry cannot be opened (like, maybe it has
+ * been deleted?)
+ */
+ public DocumentInputStream(DocumentEntry document) throws IOException {
+ if (!(document instanceof DocumentNode)) {
+ throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
+ }
+ _current_offset = 0;
+ _current_block_count = 0;
+ _marked_offset = 0;
+ _marked_offset_count = 0;
+ _document_size = document.getSize();
+ _closed = false;
+
+ // can't be asserted ... see bug 61300
+ // assert (_document_size >= 0) : "Document size can't be < 0";
+
+ DocumentNode doc = (DocumentNode)document;
+ DocumentProperty property = (DocumentProperty)doc.getProperty();
+ _document = new POIFSDocument(
+ property,
+ ((DirectoryNode)doc.getParent()).getNFileSystem()
+ );
+ _data = _document.getBlockIterator();
+ }
+
+ /**
+ * Create an InputStream from the specified Document
+ *
+ * @param document the Document to be read
+ */
+ public DocumentInputStream(POIFSDocument document) {
+ _current_offset = 0;
+ _current_block_count = 0;
+ _marked_offset = 0;
+ _marked_offset_count = 0;
+ _document_size = document.getSize();
+ _closed = false;
+ _document = document;
+ _data = _document.getBlockIterator();
+ }
@Override
- @SuppressForbidden("just delegating")
- public int available() {
- return delegate.available();
- }
+ public int available() {
+ return remainingBytes();
+ }
+
+ /**
+ * Helper methods for forbidden api calls
+ *
+ * @return the bytes remaining until the end of the stream
+ */
+ private int remainingBytes() {
+ if (_closed) {
+ throw new IllegalStateException("cannot perform requested operation on a closed stream");
+ }
+ return _document_size - _current_offset;
+ }
@Override
- public void close() {
- delegate.close();
- }
+ public void close() {
+ _closed = true;
+ }
+ /**
+ * Tests if this input stream supports the mark and reset methods.
+ *
+ * @return {@code true} always
+ */
@Override
- public void mark(int ignoredReadlimit) {
- delegate.mark(ignoredReadlimit);
- }
+ public boolean markSupported() {
+ return true;
+ }
- /**
- * Tests if this input stream supports the mark and reset methods.
- *
- * @return <code>true</code> always
- */
@Override
- public boolean markSupported() {
- return true;
- }
+ public void mark(int ignoredReadlimit) {
+ _marked_offset = _current_offset;
+ _marked_offset_count = Math.max(0, _current_block_count - 1);
+ }
@Override
- public int read() throws IOException {
- return delegate.read();
- }
+ public int read() throws IOException {
+ dieIfClosed();
+ if (atEOD()) {
+ return EOF;
+ }
+ byte[] b = new byte[1];
+ int result = read(b, 0, 1);
+ if(result >= 0) {
+ if(b[0] < 0) {
+ return b[0]+256;
+ }
+ return b[0];
+ }
+ return result;
+ }
- @Override
- public int read(byte[] b) throws IOException {
- return read(b, 0, b.length);
- }
+ @Override
+ public int read(byte[] b) throws IOException {
+ return read(b, 0, b.length);
+ }
@Override
- public int read(byte[] b, int off, int len) throws IOException {
- return delegate.read(b, off, len);
- }
+ public int read(byte[] b, int off, int len) throws IOException {
+ dieIfClosed();
+ if (b == null) {
+ throw new IllegalArgumentException("buffer must not be null");
+ }
+ if (off < 0 || len < 0 || b.length < off + len) {
+ throw new IndexOutOfBoundsException("can't read past buffer boundaries");
+ }
+ if (len == 0) {
+ return 0;
+ }
+ if (atEOD()) {
+ return EOF;
+ }
+ int limit = Math.min(remainingBytes(), len);
+ readFully(b, off, limit);
+ return limit;
+ }
- /**
- * Repositions this stream to the position at the time the mark() method was
- * last called on this input stream. If mark() has not been called this
- * method repositions the stream to its beginning.
- */
+ /**
+ * Repositions this stream to the position at the time the mark() method was
+ * last called on this input stream. If mark() has not been called this
+ * method repositions the stream to its beginning.
+ */
@Override
- public void reset() {
- delegate.reset();
+ public void reset() {
+ // Special case for reset to the start
+ if(_marked_offset == 0 && _marked_offset_count == 0) {
+ _current_block_count = _marked_offset_count;
+ _current_offset = _marked_offset;
+ _data = _document.getBlockIterator();
+ _buffer = null;
+ return;
+ }
+
+ // Start again, then wind on to the required block
+ _data = _document.getBlockIterator();
+ _current_offset = 0;
+ for(int i=0; i<_marked_offset_count; i++) {
+ _buffer = _data.next();
+ _current_offset += _buffer.remaining();
+ }
+
+ _current_block_count = _marked_offset_count;
+
+ // Do we need to position within it?
+ if(_current_offset != _marked_offset) {
+ // Grab the right block
+ _buffer = _data.next();
+ _current_block_count++;
+
+ // Skip to the right place in it
+ // (It should be positioned already at the start of the block,
+ // we need to move further inside the block)
+ int skipBy = _marked_offset - _current_offset;
+ _buffer.position(_buffer.position() + skipBy);
+ }
+
+ // All done
+ _current_offset = _marked_offset;
}
- @Override
+ @Override
public long skip(long n) throws IOException {
- return delegate.skip(n);
+ dieIfClosed();
+ if (n < 0) {
+ return 0;
+ }
+ long new_offset = _current_offset + n;
+
+ if (new_offset < _current_offset) {
+ // wrap around in converting a VERY large long to an int
+ new_offset = _document_size;
+ } else if (new_offset > _document_size) {
+ new_offset = _document_size;
+ }
+
+ long rval = new_offset - _current_offset;
+
+ // TODO Do this better
+ byte[] skip = IOUtils.safelyAllocate(rval, Integer.MAX_VALUE);
+ readFully(skip);
+ return rval;
}
- @Override
- public byte readByte() {
- return delegate.readByte();
+ private void dieIfClosed() throws IOException {
+ if (_closed) {
+ throw new IOException("cannot perform requested operation on a closed stream");
+ }
}
- @Override
- public double readDouble() {
- return delegate.readDouble();
+ private boolean atEOD() {
+ return _current_offset == _document_size;
}
- @Override
- public short readShort() {
- return (short) readUShort();
+ private void checkAvaliable(int requestedSize) {
+ if (_closed) {
+ throw new IllegalStateException("cannot perform requested operation on a closed stream");
+ }
+ if (requestedSize > _document_size - _current_offset) {
+ throw new RuntimeException("Buffer underrun - requested " + requestedSize
+ + " bytes but " + (_document_size - _current_offset) + " was available");
+ }
}
@Override
@Override
public void readFully(byte[] buf, int off, int len) {
- delegate.readFully(buf, off, len);
- }
+ if (len < 0) {
+ throw new RuntimeException("Can't read negative number of bytes");
+ }
- @Override
- public long readLong() {
- return delegate.readLong();
+ checkAvaliable(len);
+
+ int read = 0;
+ while(read < len) {
+ if(_buffer == null || _buffer.remaining() == 0) {
+ _current_block_count++;
+ _buffer = _data.next();
+ }
+
+ int limit = Math.min(len-read, _buffer.remaining());
+ _buffer.get(buf, off+read, limit);
+ _current_offset += limit;
+ read += limit;
+ }
}
@Override
- public int readInt() {
- return delegate.readInt();
- }
+ public void readPlain(byte[] buf, int off, int len) {
+ readFully(buf, off, len);
+ }
+
@Override
- public int readUShort() {
- return delegate.readUShort();
+ public byte readByte() {
+ return (byte) readUByte();
+ }
+
+ @Override
+ public double readDouble() {
+ return Double.longBitsToDouble(readLong());
+ }
+
+ @Override
+ public long readLong() {
+ checkAvaliable(LONG_SIZE);
+ byte[] data = new byte[LONG_SIZE];
+ readFully(data, 0, LONG_SIZE);
+ return LittleEndian.getLong(data, 0);
}
- @Override
- public int readUByte() {
- return delegate.readUByte();
+ @Override
+ public short readShort() {
+ checkAvaliable(SHORT_SIZE);
+ byte[] data = new byte[SHORT_SIZE];
+ readFully(data, 0, SHORT_SIZE);
+ return LittleEndian.getShort(data);
+ }
+
+ @Override
+ public int readInt() {
+ checkAvaliable(INT_SIZE);
+ byte[] data = new byte[INT_SIZE];
+ readFully(data, 0, INT_SIZE);
+ return LittleEndian.getInt(data);
}
-
+
public long readUInt() {
int i = readInt();
return i & 0xFFFFFFFFL;
}
@Override
- public void readPlain(byte[] buf, int off, int len) {
- readFully(buf, off, len);
+ public int readUShort() {
+ checkAvaliable(SHORT_SIZE);
+ byte[] data = new byte[SHORT_SIZE];
+ readFully(data, 0, SHORT_SIZE);
+ return LittleEndian.getUShort(data);
+ }
+
+ @Override
+ public int readUByte() {
+ checkAvaliable(1);
+ byte[] data = new byte[1];
+ readFully(data, 0, 1);
+ if (data[0] >= 0)
+ return data[0];
+ return data[0] + 256;
}
}
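Illustrative sketch (not part of this patch): reading a whole stream through the merged DocumentInputStream; the "Workbook" entry name is an assumption about the file being opened.

public static byte[] readWorkbookStream(File xls) throws IOException {
    try (POIFSFileSystem fs = new POIFSFileSystem(xls, true);
         DocumentInputStream dis = fs.createDocumentInputStream("Workbook")) {
        return IOUtils.toByteArray(dis);
    }
}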
{
// underlying POIFSDocument instance
- private NPOIFSDocument _document;
+ private POIFSDocument _document;
/**
* create a DocumentNode. This method is not public by design; it
*
* @return the internal POIFSDocument
*/
- NPOIFSDocument getDocument()
+ POIFSDocument getDocument()
{
return _document;
}
package org.apache.poi.poifs.filesystem;
-import java.io.*;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
-import java.util.*;
+import org.apache.poi.poifs.common.POIFSConstants;
+import org.apache.poi.poifs.property.DocumentProperty;
/**
- * This class provides a wrapper over an OutputStream so that Document
- * writers can't accidently go over their size limits
- *
- * @author Marc Johnson (mjohnson at apache dot org)
+ * This class provides methods to write a DocumentEntry managed by a
+ * {@link POIFSFileSystem} instance.
*/
-
public final class DocumentOutputStream extends OutputStream {
- private final OutputStream _stream;
- private final int _limit;
- private int _written;
+ /** the Document's size, i.e. the size of the big block data - mini block data is cached and not counted */
+ private int _document_size = 0;
+
+ /** have we been closed? */
+ private boolean _closed = false;
+
+ /** the actual Document */
+ private POIFSDocument _document;
+ /** and its Property */
+ private DocumentProperty _property;
+
+ /** our buffer, when null we're into normal blocks */
+ private ByteArrayOutputStream _buffer =
+ new ByteArrayOutputStream(POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE);
+
+ /** our main block stream, when we're into normal blocks */
+ private POIFSStream _stream;
+ private OutputStream _stream_output;
+
+ /** a write limit or -1 if unlimited */
+ private final long _limit;
+
+
+ /**
+ * Create an OutputStream from the specified DocumentEntry.
+ * The specified entry will be emptied.
+ *
+ * @param document the DocumentEntry to be written
+ */
+ public DocumentOutputStream(DocumentEntry document) throws IOException {
+ this(document, -1);
+ }
+
+ /**
+ * Create an OutputStream to create the specified new Entry
+ *
+ * @param parent Where to create the Entry
+ * @param name Name of the new entry
+ */
+ public DocumentOutputStream(DirectoryEntry parent, String name) throws IOException {
+ this(createDocument(parent, name), -1);
+ }
/**
* Create a DocumentOutputStream
*
- * @param stream the OutputStream to which the data is actually
- * read
+ * @param document the DocumentEntry to which the data is actually written
* @param limit the maximum number of bytes that can be written
*/
- DocumentOutputStream(OutputStream stream, int limit) {
- _stream = stream;
+ DocumentOutputStream(DocumentEntry document, long limit) throws IOException {
+ this(getDocument(document), limit);
+ }
+
+ DocumentOutputStream(POIFSDocument document, long limit) throws IOException {
+ _document = document;
+ _document.free();
+
+ _property = document.getDocumentProperty();
+
_limit = limit;
- _written = 0;
}
- /**
- * Writes the specified byte to this output stream. The general
- * contract for write is that one byte is written to the output
- * stream. The byte to be written is the eight low-order bits of
- * the argument b. The 24 high-order bits of b are ignored.
- *
- * @param b the byte.
- * @exception IOException if an I/O error occurs. In particular,
- * an IOException may be thrown if the
- * output stream has been closed, or if the
- * writer tries to write too much data.
- */
- public void write(int b)
- throws IOException
- {
- limitCheck(1);
- _stream.write(b);
+ private static POIFSDocument getDocument(DocumentEntry document) throws IOException {
+ if (!(document instanceof DocumentNode)) {
+ throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
+ }
+ return new POIFSDocument((DocumentNode)document);
}
- /**
- * Writes b.length bytes from the specified byte array
- * to this output stream.
- *
- * @param b the data.
- * @exception IOException if an I/O error occurs.
- */
- public void write(byte b[])
- throws IOException
- {
- write(b, 0, b.length);
+ private static DocumentEntry createDocument(DirectoryEntry parent, String name) throws IOException {
+ if (!(parent instanceof DirectoryNode)) {
+ throw new IOException("Cannot open internal directory storage, " + parent + " not a Directory Node");
+ }
+
+ // Have an empty one created for now
+ return parent.createDocument(name, new ByteArrayInputStream(new byte[0]));
}
- /**
- * Writes len bytes from the specified byte array starting at
- * offset off to this output stream. The general contract for
- * write(b, off, len) is that some of the bytes in the array b are
- * written to the output stream in order; element b[off] is the
- * first byte written and b[off+len-1] is the last byte written by
- * this operation.<p>
- * If b is null, a NullPointerException is thrown.<p>
- * If off is negative, or len is negative, or off+len is greater
- * than the length of the array b, then an
- * IndexOutOfBoundsException is thrown.
- *
- * @param b the data.
- * @param off the start offset in the data.
- * @param len the number of bytes to write.
- * @exception IOException if an I/O error occurs. In particular,
- * an <code>IOException</code> is thrown if the
- * output stream is closed or if the writer
- * tries to write too many bytes.
- */
- public void write(byte b[], int off, int len)
- throws IOException
- {
- limitCheck(len);
- _stream.write(b, off, len);
+ private void checkBufferSize() throws IOException {
+ // Have we gone over the mini stream limit yet?
+ if (_buffer.size() > POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
+ // Will need to be in the main stream
+ byte[] data = _buffer.toByteArray();
+ _buffer = null;
+ write(data, 0, data.length);
+ } else {
+ // So far, mini stream will work, keep going
+ }
}
- /**
- * Flushes this output stream and forces any buffered output bytes
- * to be written out.
- *
- * @exception IOException if an I/O error occurs.
- */
- public void flush()
- throws IOException
- {
- _stream.flush();
+ public void write(int b) throws IOException {
+ write(new byte[] { (byte)b }, 0, 1);
}
- /**
- * Closes this output stream and releases any system resources
- * associated with this stream. The general contract of close is
- * that it closes the output stream. A closed stream cannot
- * perform output operations and cannot be reopened.
- *
- * @exception IOException if an I/O error occurs.
- */
- public void close() {
+ @Override
+ public void write(byte[] b, int off, int len) throws IOException {
+ if (_closed) {
+ throw new IOException("cannot perform requested operation on a closed stream");
+ }
+ if (_limit > -1 && (size() + len) > _limit) {
+ throw new IOException("tried to write too much data");
+ }
- // ignore this call
+ if (_buffer != null) {
+ _buffer.write(b, off, len);
+ checkBufferSize();
+ } else {
+ if (_stream == null) {
+ _stream = new POIFSStream(_document.getFileSystem());
+ _stream_output = _stream.getOutputStream();
+ }
+ _stream_output.write(b, off, len);
+ _document_size += len;
+ }
}
- /**
- * write the rest of the document's data (fill in at the end)
- *
- * @param totalLimit the actual number of bytes the corresponding
- * document must fill
- * @param fill the byte to fill remaining space with
- *
- * @exception IOException on I/O error
- */
- void writeFiller(int totalLimit, byte fill)
- throws IOException
- {
- if (totalLimit > _written)
- {
- byte[] filler = new byte[ totalLimit - _written ];
-
- Arrays.fill(filler, fill);
- _stream.write(filler);
+ public void close() throws IOException {
+ // Do we have a pending buffer for the mini stream?
+ if (_buffer != null) {
+ // It's not much data, so ask POIFSDocument to do it for us
+ _document.replaceContents(new ByteArrayInputStream(_buffer.toByteArray()));
+ }
+ else {
+ // We've been writing to the stream as we've gone along
+ // Update the details on the property now
+ _stream_output.close();
+ _property.updateSize(_document_size);
+ _property.setStartBlock(_stream.getStartBlock());
}
+
+ // No more!
+ _closed = true;
}
- private void limitCheck(int toBeWritten)
- throws IOException
- {
- if ((_written + toBeWritten) > _limit)
- {
- throw new IOException("tried to write too much data");
- }
- _written += toBeWritten;
+ /**
+ * @return the amount of written bytes
+ */
+ public long size() {
+ return _document_size + (_buffer == null ? 0 : _buffer.size());
}
-}
+}
\ No newline at end of file
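
A hedged usage sketch of the reworked DocumentOutputStream above: a new entry is created directly under the root via the public (DirectoryEntry, String) constructor introduced here, data is buffered in the mini stream until it outgrows BIG_BLOCK_MINIMUM_DOCUMENT_SIZE, and the whole filesystem is then written out. The entry and file names are illustrative only.

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.poi.poifs.filesystem.DocumentOutputStream;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class WriteDocumentEntry {
    public static void main(String[] args) throws IOException {
        try (POIFSFileSystem fs = new POIFSFileSystem()) {
            // Creates an empty entry in the root, then streams data into it;
            // small payloads stay in the mini stream, larger ones spill to big blocks
            try (DocumentOutputStream out = new DocumentOutputStream(fs.getRoot(), "MyStream")) {
                out.write("hello poifs".getBytes(StandardCharsets.UTF_8));
            }
            // Persist the in-memory filesystem to disk
            try (FileOutputStream fos = new FileOutputStream("out.ole2")) {
                fs.writeFilesystem(fos);
            }
        }
    }
}
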
* @param target
* is the target POIFS to copy to
*/
- public static void copyNodes( NPOIFSFileSystem source, NPOIFSFileSystem target )
+ public static void copyNodes(POIFSFileSystem source, POIFSFileSystem target)
throws IOException {
copyNodes( source.getRoot(), target.getRoot() );
}
* @param target is the target POIFS to copy to
* @param excepts is a list of Entry Names to be excluded from the copy
*/
- public static void copyNodes( NPOIFSFileSystem source, NPOIFSFileSystem target, List<String> excepts )
+ public static void copyNodes(POIFSFileSystem source, POIFSFileSystem target, List<String> excepts)
throws IOException {
copyNodes(
new FilteringDirectoryNode(source.getRoot(), excepts),
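
As an illustration of the copyNodes helpers above (a sketch only; the input and output file names are hypothetical), every entry of one filesystem can be cloned into a fresh one and written back out:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class CloneFileSystem {
    public static void main(String[] args) throws IOException {
        try (POIFSFileSystem source = new POIFSFileSystem(new File("in.ole2"), true);
             POIFSFileSystem target = new POIFSFileSystem()) {
            // Recursively copies every directory and document from source's root to target's root
            EntryUtils.copyNodes(source, target);
            try (FileOutputStream fos = new FileOutputStream("out.ole2")) {
                target.writeFilesystem(fos);
            }
        }
    }
}
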
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.filesystem;
-
-import static org.apache.poi.util.LittleEndianConsts.INT_SIZE;
-import static org.apache.poi.util.LittleEndianConsts.LONG_SIZE;
-import static org.apache.poi.util.LittleEndianConsts.SHORT_SIZE;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-
-import org.apache.poi.poifs.property.DocumentProperty;
-import org.apache.poi.util.IOUtils;
-import org.apache.poi.util.LittleEndian;
-
-/**
- * This class provides methods to read a DocumentEntry managed by a
- * {@link NPOIFSFileSystem} instance.
- */
-public final class NDocumentInputStream extends DocumentInputStream {
- /** current offset into the Document */
- private int _current_offset;
- /** current block count */
- private int _current_block_count;
-
- /** current marked offset into the Document (used by mark and reset) */
- private int _marked_offset;
- /** and the block count for it */
- private int _marked_offset_count;
-
- /** the Document's size */
- private final int _document_size;
-
- /** have we been closed? */
- private boolean _closed;
-
- /** the actual Document */
- private final NPOIFSDocument _document;
-
- private Iterator<ByteBuffer> _data;
- private ByteBuffer _buffer;
-
- /**
- * Create an InputStream from the specified DocumentEntry
- *
- * @param document the DocumentEntry to be read
- *
- * @exception IOException if the DocumentEntry cannot be opened (like, maybe it has
- * been deleted?)
- */
- public NDocumentInputStream(DocumentEntry document) throws IOException {
- if (!(document instanceof DocumentNode)) {
- throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
- }
- _current_offset = 0;
- _current_block_count = 0;
- _marked_offset = 0;
- _marked_offset_count = 0;
- _document_size = document.getSize();
- _closed = false;
-
- // can't be asserted ... see bug 61300
- // assert (_document_size >= 0) : "Document size can't be < 0";
-
- DocumentNode doc = (DocumentNode)document;
- DocumentProperty property = (DocumentProperty)doc.getProperty();
- _document = new NPOIFSDocument(
- property,
- ((DirectoryNode)doc.getParent()).getNFileSystem()
- );
- _data = _document.getBlockIterator();
- }
-
- /**
- * Create an InputStream from the specified Document
- *
- * @param document the Document to be read
- */
- public NDocumentInputStream(NPOIFSDocument document) {
- _current_offset = 0;
- _current_block_count = 0;
- _marked_offset = 0;
- _marked_offset_count = 0;
- _document_size = document.getSize();
- _closed = false;
- _document = document;
- _data = _document.getBlockIterator();
- }
-
- @Override
- public int available() {
- return remainingBytes();
- }
-
- /**
- * Helper methods for forbidden api calls
- *
- * @return the bytes remaining until the end of the stream
- */
- private int remainingBytes() {
- if (_closed) {
- throw new IllegalStateException("cannot perform requested operation on a closed stream");
- }
- return _document_size - _current_offset;
- }
-
- @Override
- public void close() {
- _closed = true;
- }
-
- @Override
- public void mark(int ignoredReadlimit) {
- _marked_offset = _current_offset;
- _marked_offset_count = Math.max(0, _current_block_count - 1);
- }
-
- @Override
- public int read() throws IOException {
- dieIfClosed();
- if (atEOD()) {
- return EOF;
- }
- byte[] b = new byte[1];
- int result = read(b, 0, 1);
- if(result >= 0) {
- if(b[0] < 0) {
- return b[0]+256;
- }
- return b[0];
- }
- return result;
- }
-
- @Override
- public int read(byte[] b, int off, int len) throws IOException {
- dieIfClosed();
- if (b == null) {
- throw new IllegalArgumentException("buffer must not be null");
- }
- if (off < 0 || len < 0 || b.length < off + len) {
- throw new IndexOutOfBoundsException("can't read past buffer boundaries");
- }
- if (len == 0) {
- return 0;
- }
- if (atEOD()) {
- return EOF;
- }
- int limit = Math.min(remainingBytes(), len);
- readFully(b, off, limit);
- return limit;
- }
-
- /**
- * Repositions this stream to the position at the time the mark() method was
- * last called on this input stream. If mark() has not been called this
- * method repositions the stream to its beginning.
- */
- @Override
- public void reset() {
- // Special case for reset to the start
- if(_marked_offset == 0 && _marked_offset_count == 0) {
- _current_block_count = _marked_offset_count;
- _current_offset = _marked_offset;
- _data = _document.getBlockIterator();
- _buffer = null;
- return;
- }
-
- // Start again, then wind on to the required block
- _data = _document.getBlockIterator();
- _current_offset = 0;
- for(int i=0; i<_marked_offset_count; i++) {
- _buffer = _data.next();
- _current_offset += _buffer.remaining();
- }
-
- _current_block_count = _marked_offset_count;
-
- // Do we need to position within it?
- if(_current_offset != _marked_offset) {
- // Grab the right block
- _buffer = _data.next();
- _current_block_count++;
-
- // Skip to the right place in it
- // (It should be positioned already at the start of the block,
- // we need to move further inside the block)
- int skipBy = _marked_offset - _current_offset;
- _buffer.position(_buffer.position() + skipBy);
- }
-
- // All done
- _current_offset = _marked_offset;
- }
-
- @Override
- public long skip(long n) throws IOException {
- dieIfClosed();
- if (n < 0) {
- return 0;
- }
- long new_offset = _current_offset + n;
-
- if (new_offset < _current_offset) {
- // wrap around in converting a VERY large long to an int
- new_offset = _document_size;
- } else if (new_offset > _document_size) {
- new_offset = _document_size;
- }
-
- long rval = new_offset - _current_offset;
-
- // TODO Do this better
- byte[] skip = IOUtils.safelyAllocate(rval, Integer.MAX_VALUE);
- readFully(skip);
- return rval;
- }
-
- private void dieIfClosed() throws IOException {
- if (_closed) {
- throw new IOException("cannot perform requested operation on a closed stream");
- }
- }
-
- private boolean atEOD() {
- return _current_offset == _document_size;
- }
-
- private void checkAvaliable(int requestedSize) {
- if (_closed) {
- throw new IllegalStateException("cannot perform requested operation on a closed stream");
- }
- if (requestedSize > _document_size - _current_offset) {
- throw new RuntimeException("Buffer underrun - requested " + requestedSize
- + " bytes but " + (_document_size - _current_offset) + " was available");
- }
- }
-
- @Override
- public void readFully(byte[] buf, int off, int len) {
- if (len < 0) {
- throw new RuntimeException("Can't read negative number of bytes");
- }
-
- checkAvaliable(len);
-
- int read = 0;
- while(read < len) {
- if(_buffer == null || _buffer.remaining() == 0) {
- _current_block_count++;
- _buffer = _data.next();
- }
-
- int limit = Math.min(len-read, _buffer.remaining());
- _buffer.get(buf, off+read, limit);
- _current_offset += limit;
- read += limit;
- }
- }
-
- @Override
- public byte readByte() {
- return (byte) readUByte();
- }
-
- @Override
- public double readDouble() {
- return Double.longBitsToDouble(readLong());
- }
-
- @Override
- public long readLong() {
- checkAvaliable(LONG_SIZE);
- byte[] data = new byte[LONG_SIZE];
- readFully(data, 0, LONG_SIZE);
- return LittleEndian.getLong(data, 0);
- }
-
- @Override
- public short readShort() {
- checkAvaliable(SHORT_SIZE);
- byte[] data = new byte[SHORT_SIZE];
- readFully(data, 0, SHORT_SIZE);
- return LittleEndian.getShort(data);
- }
-
- @Override
- public int readInt() {
- checkAvaliable(INT_SIZE);
- byte[] data = new byte[INT_SIZE];
- readFully(data, 0, INT_SIZE);
- return LittleEndian.getInt(data);
- }
-
- @Override
- public int readUShort() {
- checkAvaliable(SHORT_SIZE);
- byte[] data = new byte[SHORT_SIZE];
- readFully(data, 0, SHORT_SIZE);
- return LittleEndian.getUShort(data);
- }
-
- @Override
- public int readUByte() {
- checkAvaliable(1);
- byte[] data = new byte[1];
- readFully(data, 0, 1);
- if (data[0] >= 0)
- return data[0];
- return data[0] + 256;
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.filesystem;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.property.DocumentProperty;
-
-/**
- * This class provides methods to write a DocumentEntry managed by a
- * {@link NPOIFSFileSystem} instance.
- */
-public final class NDocumentOutputStream extends OutputStream {
- /** the Document's size */
- private int _document_size;
-
- /** have we been closed? */
- private boolean _closed;
-
- /** the actual Document */
- private NPOIFSDocument _document;
- /** and its Property */
- private DocumentProperty _property;
-
- /** our buffer, when null we're into normal blocks */
- private ByteArrayOutputStream _buffer =
- new ByteArrayOutputStream(POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE);
-
- /** our main block stream, when we're into normal blocks */
- private NPOIFSStream _stream;
- private OutputStream _stream_output;
-
- /**
- * Create an OutputStream from the specified DocumentEntry.
- * The specified entry will be emptied.
- *
- * @param document the DocumentEntry to be written
- */
- public NDocumentOutputStream(DocumentEntry document) throws IOException {
- if (!(document instanceof DocumentNode)) {
- throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
- }
- _document_size = 0;
- _closed = false;
-
- _property = (DocumentProperty)((DocumentNode)document).getProperty();
-
- _document = new NPOIFSDocument((DocumentNode)document);
- _document.free();
- }
-
- /**
- * Create an OutputStream to create the specified new Entry
- *
- * @param parent Where to create the Entry
- * @param name Name of the new entry
- */
- public NDocumentOutputStream(DirectoryEntry parent, String name) throws IOException {
- if (!(parent instanceof DirectoryNode)) {
- throw new IOException("Cannot open internal directory storage, " + parent + " not a Directory Node");
- }
- _document_size = 0;
- _closed = false;
-
- // Have an empty one created for now
- DocumentEntry doc = parent.createDocument(name, new ByteArrayInputStream(new byte[0]));
- _property = (DocumentProperty)((DocumentNode)doc).getProperty();
- _document = new NPOIFSDocument((DocumentNode)doc);
- }
-
- private void dieIfClosed() throws IOException {
- if (_closed) {
- throw new IOException("cannot perform requested operation on a closed stream");
- }
- }
-
- private void checkBufferSize() throws IOException {
- // Have we gone over the mini stream limit yet?
- if (_buffer.size() > POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
- // Will need to be in the main stream
- byte[] data = _buffer.toByteArray();
- _buffer = null;
- write(data, 0, data.length);
- } else {
- // So far, mini stream will work, keep going
- }
- }
-
- public void write(int b) throws IOException {
- dieIfClosed();
-
- if (_buffer != null) {
- _buffer.write(b);
- checkBufferSize();
- } else {
- write(new byte[] { (byte)b });
- }
- }
-
- public void write(byte[] b) throws IOException {
- dieIfClosed();
-
- if (_buffer != null) {
- _buffer.write(b);
- checkBufferSize();
- } else {
- write(b, 0, b.length);
- }
- }
-
- public void write(byte[] b, int off, int len) throws IOException {
- dieIfClosed();
-
- if (_buffer != null) {
- _buffer.write(b, off, len);
- checkBufferSize();
- } else {
- if (_stream == null) {
- _stream = new NPOIFSStream(_document.getFileSystem());
- _stream_output = _stream.getOutputStream();
- }
- _stream_output.write(b, off, len);
- _document_size += len;
- }
- }
-
- public void close() throws IOException {
- // Do we have a pending buffer for the mini stream?
- if (_buffer != null) {
- // It's not much data, so ask NPOIFSDocument to do it for us
- _document.replaceContents(new ByteArrayInputStream(_buffer.toByteArray()));
- }
- else {
- // We've been writing to the stream as we've gone along
- // Update the details on the property now
- _stream_output.close();
- _property.updateSize(_document_size);
- _property.setStartBlock(_stream.getStartBlock());
- }
-
- // No more!
- _closed = true;
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.filesystem;
-
-import static java.util.Collections.emptyList;
-
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.dev.POIFSViewable;
-import org.apache.poi.poifs.property.DocumentProperty;
-import org.apache.poi.util.HexDump;
-import org.apache.poi.util.IOUtils;
-
-/**
- * This class manages a document in the NIO POIFS filesystem.
- * This is the {@link NPOIFSFileSystem} version.
- */
-public final class NPOIFSDocument implements POIFSViewable, Iterable<ByteBuffer> {
-
- //arbitrarily selected; may need to increase
- private static final int MAX_RECORD_LENGTH = 100_000;
-
- private DocumentProperty _property;
-
- private NPOIFSFileSystem _filesystem;
- private NPOIFSStream _stream;
- private int _block_size;
-
- /**
- * Constructor for an existing Document
- */
- public NPOIFSDocument(DocumentNode document) {
- this((DocumentProperty)document.getProperty(),
- ((DirectoryNode)document.getParent()).getNFileSystem());
- }
-
- /**
- * Constructor for an existing Document
- */
- public NPOIFSDocument(DocumentProperty property, NPOIFSFileSystem filesystem) {
- this._property = property;
- this._filesystem = filesystem;
-
- if(property.getSize() < POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
- _stream = new NPOIFSStream(_filesystem.getMiniStore(), property.getStartBlock());
- _block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
- } else {
- _stream = new NPOIFSStream(_filesystem, property.getStartBlock());
- _block_size = _filesystem.getBlockStoreBlockSize();
- }
- }
-
- /**
- * Constructor for a new Document
- *
- * @param name the name of the POIFSDocument
- * @param stream the InputStream we read data from
- */
- public NPOIFSDocument(String name, NPOIFSFileSystem filesystem, InputStream stream)
- throws IOException
- {
- this._filesystem = filesystem;
-
- // Store it
- int length = store(stream);
-
- // Build the property for it
- this._property = new DocumentProperty(name, length);
- _property.setStartBlock(_stream.getStartBlock());
- _property.setDocument(this);
- }
-
- public NPOIFSDocument(String name, int size, NPOIFSFileSystem filesystem, POIFSWriterListener writer)
- throws IOException
- {
- this._filesystem = filesystem;
-
- if (size < POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
- _stream = new NPOIFSStream(filesystem.getMiniStore());
- _block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
- } else {
- _stream = new NPOIFSStream(filesystem);
- _block_size = _filesystem.getBlockStoreBlockSize();
- }
-
- OutputStream innerOs = _stream.getOutputStream();
- DocumentOutputStream os = new DocumentOutputStream(innerOs, size);
- POIFSDocumentPath path = new POIFSDocumentPath(name.split("\\\\"));
- String docName = path.getComponent(path.length()-1);
- POIFSWriterEvent event = new POIFSWriterEvent(os, path, docName, size);
- writer.processPOIFSWriterEvent(event);
- innerOs.close();
-
- // And build the property for it
- this._property = new DocumentProperty(name, size);
- _property.setStartBlock(_stream.getStartBlock());
- _property.setDocument(this);
- }
-
- /**
- * Stores the given data for this Document
- */
- private int store(InputStream stream) throws IOException {
- final int bigBlockSize = POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE;
- BufferedInputStream bis = new BufferedInputStream(stream, bigBlockSize+1);
- bis.mark(bigBlockSize);
-
- // Do we need to store as a mini stream or a full one?
- long streamBlockSize = IOUtils.skipFully(bis, bigBlockSize);
- if (streamBlockSize < bigBlockSize) {
- _stream = new NPOIFSStream(_filesystem.getMiniStore());
- _block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
- } else {
- _stream = new NPOIFSStream(_filesystem);
- _block_size = _filesystem.getBlockStoreBlockSize();
- }
-
- // start from the beginning
- bis.reset();
-
- // Store it
- final long length;
- try (OutputStream os = _stream.getOutputStream()) {
- length = IOUtils.copy(bis, os);
-
- // Pad to the end of the block with -1s
- int usedInBlock = (int) (length % _block_size);
- if (usedInBlock != 0 && usedInBlock != _block_size) {
- int toBlockEnd = _block_size - usedInBlock;
- byte[] padding = IOUtils.safelyAllocate(toBlockEnd, MAX_RECORD_LENGTH);
- Arrays.fill(padding, (byte) 0xFF);
- os.write(padding);
- }
- }
-
- return (int)length;
- }
-
- /**
- * Frees the underlying stream and property
- */
- void free() throws IOException {
- _stream.free();
- _property.setStartBlock(POIFSConstants.END_OF_CHAIN);
- }
-
- NPOIFSFileSystem getFileSystem()
- {
- return _filesystem;
- }
-
- int getDocumentBlockSize() {
- return _block_size;
- }
-
- @Override
- public Iterator<ByteBuffer> iterator() {
- return getBlockIterator();
- }
-
- Iterator<ByteBuffer> getBlockIterator() {
- return (getSize() > 0 ? _stream : Collections.<ByteBuffer>emptyList()).iterator();
- }
-
- /**
- * @return size of the document
- */
- public int getSize() {
- return _property.getSize();
- }
-
- public void replaceContents(InputStream stream) throws IOException {
- free();
- int size = store(stream);
- _property.setStartBlock(_stream.getStartBlock());
- _property.updateSize(size);
- }
-
- /**
- * @return the instance's DocumentProperty
- */
- DocumentProperty getDocumentProperty() {
- return _property;
- }
-
- /**
- * Get an array of objects, some of which may implement POIFSViewable
- *
- * @return an array of Object; may not be null, but may be empty
- */
- public Object[] getViewableArray() {
- String result = "<NO DATA>";
-
- if(getSize() > 0) {
- // Get all the data into a single array
- byte[] data = IOUtils.safelyAllocate(getSize(), MAX_RECORD_LENGTH);
- int offset = 0;
- for(ByteBuffer buffer : _stream) {
- int length = Math.min(_block_size, data.length-offset);
- buffer.get(data, offset, length);
- offset += length;
- }
-
- result = HexDump.dump(data, 0, 0);
- }
-
- return new String[]{ result };
- }
-
- /**
- * Get an Iterator of objects, some of which may implement POIFSViewable
- *
- * @return an Iterator; may not be null, but may have an empty back end
- * store
- */
- public Iterator<Object> getViewableIterator() {
- return emptyList().iterator();
- }
-
- /**
- * Give viewers a hint as to whether to call getViewableArray or
- * getViewableIterator
- *
- * @return <code>true</code> if a viewer should call getViewableArray,
- * <code>false</code> if a viewer should call getViewableIterator
- */
- public boolean preferArray() {
- return true;
- }
-
- /**
- * Provides a short description of the object, to be used when a
- * POIFSViewable object has not provided its contents.
- *
- * @return short description
- */
- public String getShortDescription() {
-
- return "Document: \"" + _property.getName() + "\" size = " + getSize();
- }
-}
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.filesystem;
-
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.nio.channels.Channels;
-import java.nio.channels.FileChannel;
-import java.nio.channels.ReadableByteChannel;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.poi.EmptyFileException;
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.dev.POIFSViewable;
-import org.apache.poi.poifs.nio.ByteArrayBackedDataSource;
-import org.apache.poi.poifs.nio.DataSource;
-import org.apache.poi.poifs.nio.FileBackedDataSource;
-import org.apache.poi.poifs.property.DirectoryProperty;
-import org.apache.poi.poifs.property.DocumentProperty;
-import org.apache.poi.poifs.property.NPropertyTable;
-import org.apache.poi.poifs.storage.BATBlock;
-import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
-import org.apache.poi.poifs.storage.BlockAllocationTableReader;
-import org.apache.poi.poifs.storage.BlockAllocationTableWriter;
-import org.apache.poi.poifs.storage.HeaderBlock;
-import org.apache.poi.poifs.storage.HeaderBlockWriter;
-import org.apache.poi.util.CloseIgnoringInputStream;
-import org.apache.poi.util.IOUtils;
-import org.apache.poi.util.Internal;
-import org.apache.poi.util.POILogFactory;
-import org.apache.poi.util.POILogger;
-
-/**
- * <p>This is the main class of the POIFS system; it manages the entire
- * life cycle of the filesystem.</p>
- * <p>This is the new NIO version, which uses less memory</p>
- */
-
-public class NPOIFSFileSystem extends BlockStore
- implements POIFSViewable, Closeable
-{
- //arbitrarily selected; may need to increase
- private static final int MAX_RECORD_LENGTH = 100_000;
-
- private static final POILogger LOG = POILogFactory.getLogger(NPOIFSFileSystem.class);
-
- /**
- * Convenience method for clients that want to avoid the auto-close behaviour of the constructor.
- */
- public static InputStream createNonClosingInputStream(InputStream is) {
- return new CloseIgnoringInputStream(is);
- }
-
- private NPOIFSMiniStore _mini_store;
- private NPropertyTable _property_table;
- private List<BATBlock> _xbat_blocks;
- private List<BATBlock> _bat_blocks;
- private HeaderBlock _header;
- private DirectoryNode _root;
-
- private DataSource _data;
-
- /**
- * What big block size the file uses. Most files
- * use 512 bytes, but a few use 4096
- */
- private POIFSBigBlockSize bigBlockSize =
- POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;
-
- private NPOIFSFileSystem(boolean newFS)
- {
- _header = new HeaderBlock(bigBlockSize);
- _property_table = new NPropertyTable(_header);
- _mini_store = new NPOIFSMiniStore(this, _property_table.getRoot(), new ArrayList<>(), _header);
- _xbat_blocks = new ArrayList<>();
- _bat_blocks = new ArrayList<>();
- _root = null;
-
- if(newFS) {
- // Data needs to initially hold just the header block,
- // a single bat block, and an empty properties section
- _data = new ByteArrayBackedDataSource(IOUtils.safelyAllocate(
- bigBlockSize.getBigBlockSize()*3, MAX_RECORD_LENGTH));
- }
- }
-
- /**
- * Constructor, intended for writing
- */
- public NPOIFSFileSystem()
- {
- this(true);
-
- // Reserve block 0 for the start of the Properties Table
- // Create a single empty BAT, and put that at offset 1
- _header.setBATCount(1);
- _header.setBATArray(new int[] { 1 });
- BATBlock bb = BATBlock.createEmptyBATBlock(bigBlockSize, false);
- bb.setOurBlockIndex(1);
- _bat_blocks.add(bb);
-
- setNextBlock(0, POIFSConstants.END_OF_CHAIN);
- setNextBlock(1, POIFSConstants.FAT_SECTOR_BLOCK);
-
- _property_table.setStartBlock(0);
- }
-
- /**
- * <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
- * creating from an <tt>InputStream</tt>. The File will be opened read-only</p>
- *
- * <p>Note that with this constructor, you will need to call {@link #close()}
- * when you're done to have the underlying file closed, as the file is
- * kept open during normal operation to read the data out.</p>
- *
- * @param file the File from which to read the data
- *
- * @exception IOException on errors reading, or on invalid data
- */
- public NPOIFSFileSystem(File file)
- throws IOException
- {
- this(file, true);
- }
-
- /**
- * <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
- * creating from an <tt>InputStream</tt>.</p>
- *
- * <p>Note that with this constructor, you will need to call {@link #close()}
- * when you're done to have the underlying file closed, as the file is
- * kept open during normal operation to read the data out.</p>
- *
- * @param file the File from which to read or read/write the data
- * @param readOnly whether the POIFileSystem will only be used in read-only mode
- *
- * @exception IOException on errors reading, or on invalid data
- */
- public NPOIFSFileSystem(File file, boolean readOnly)
- throws IOException
- {
- this(null, file, readOnly, true);
- }
-
- /**
- * <p>Creates a POIFSFileSystem from an open <tt>FileChannel</tt>. This uses
- * less memory than creating from an <tt>InputStream</tt>. The stream will
- * be used in read-only mode.</p>
- *
- * <p>Note that with this constructor, you will need to call {@link #close()}
- * when you're done to have the underlying Channel closed, as the channel is
- * kept open during normal operation to read the data out.</p>
- *
- * @param channel the FileChannel from which to read the data
- *
- * @exception IOException on errors reading, or on invalid data
- */
- public NPOIFSFileSystem(FileChannel channel)
- throws IOException
- {
- this(channel, true);
- }
-
- /**
- * <p>Creates a POIFSFileSystem from an open <tt>FileChannel</tt>. This uses
- * less memory than creating from an <tt>InputStream</tt>.</p>
- *
- * <p>Note that with this constructor, you will need to call {@link #close()}
- * when you're done to have the underlying Channel closed, as the channel is
- * kept open during normal operation to read the data out.</p>
- *
- * @param channel the FileChannel from which to read or read/write the data
- * @param readOnly whether the POIFileSystem will only be used in read-only mode
- *
- * @exception IOException on errors reading, or on invalid data
- */
- public NPOIFSFileSystem(FileChannel channel, boolean readOnly)
- throws IOException
- {
- this(channel, null, readOnly, false);
- }
-
- private NPOIFSFileSystem(FileChannel channel, File srcFile, boolean readOnly, boolean closeChannelOnError)
- throws IOException
- {
- this(false);
-
- try {
- // Initialize the datasource
- if (srcFile != null) {
- if (srcFile.length() == 0)
- throw new EmptyFileException();
-
- FileBackedDataSource d = new FileBackedDataSource(srcFile, readOnly);
- channel = d.getChannel();
- _data = d;
- } else {
- _data = new FileBackedDataSource(channel, readOnly);
- }
-
- // Get the header
- ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
- IOUtils.readFully(channel, headerBuffer);
-
- // Have the header processed
- _header = new HeaderBlock(headerBuffer);
-
- // Now process the various entries
- readCoreContents();
- } catch(IOException | RuntimeException e) {
- // Comes from Iterators etc.
- // TODO Decide if we can handle these better whilst
- // still sticking to the iterator contract
- if (closeChannelOnError && channel != null) {
- channel.close();
- channel = null;
- }
- throw e;
- }
- }
-
- /**
- * Create a POIFSFileSystem from an <tt>InputStream</tt>. Normally the stream is read until
- * EOF. The stream is always closed.<p>
- *
- * Some streams are usable after reaching EOF (typically those that return <code>true</code>
- * for <tt>markSupported()</tt>). In the unlikely case that the caller has such a stream
- * <i>and</i> needs to use it after this constructor completes, a work around is to wrap the
- * stream in order to trap the <tt>close()</tt> call. A convenience method (
- * <tt>createNonClosingInputStream()</tt>) has been provided for this purpose:
- * <pre>
- * InputStream wrappedStream = POIFSFileSystem.createNonClosingInputStream(is);
- * HSSFWorkbook wb = new HSSFWorkbook(wrappedStream);
- * is.reset();
- * doSomethingElse(is);
- * </pre>
- * Note also the special case of <tt>ByteArrayInputStream</tt> for which the <tt>close()</tt>
- * method does nothing.
- * <pre>
- * ByteArrayInputStream bais = ...
- * HSSFWorkbook wb = new HSSFWorkbook(bais); // calls bais.close() !
- * bais.reset(); // no problem
- * doSomethingElse(bais);
- * </pre>
- *
- * @param stream the InputStream from which to read the data
- *
- * @exception IOException on errors reading, or on invalid data
- */
-
- public NPOIFSFileSystem(InputStream stream)
- throws IOException
- {
- this(false);
-
- ReadableByteChannel channel = null;
- boolean success = false;
-
- try {
- // Turn our InputStream into something NIO based
- channel = Channels.newChannel(stream);
-
- // Get the header
- ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
- IOUtils.readFully(channel, headerBuffer);
-
- // Have the header processed
- _header = new HeaderBlock(headerBuffer);
-
- // Sanity check the block count
- BlockAllocationTableReader.sanityCheckBlockCount(_header.getBATCount());
-
- // We need to buffer the whole file into memory when
- // working with an InputStream.
- // The max possible size is when each BAT block entry is used
- long maxSize = BATBlock.calculateMaximumSize(_header);
- if (maxSize > Integer.MAX_VALUE) {
- throw new IllegalArgumentException("Unable read a >2gb file via an InputStream");
- }
- ByteBuffer data = ByteBuffer.allocate((int)maxSize);
-
- // Copy in the header
- headerBuffer.position(0);
- data.put(headerBuffer);
- data.position(headerBuffer.capacity());
-
- // Now read the rest of the stream
- IOUtils.readFully(channel, data);
- success = true;
-
- // Turn it into a DataSource
- _data = new ByteArrayBackedDataSource(data.array(), data.position());
- } finally {
- // As per the constructor contract, always close the stream
- if(channel != null)
- channel.close();
- closeInputStream(stream, success);
- }
-
- // Now process the various entries
- readCoreContents();
- }
- /**
- * @param stream the stream to be closed
- * @param success <code>false</code> if an exception is currently being thrown in the calling method
- */
- private void closeInputStream(InputStream stream, boolean success) {
- try {
- stream.close();
- } catch (IOException e) {
- if(success) {
- throw new RuntimeException(e);
- }
- // else not success? Try block did not complete normally
- // just print stack trace and leave original ex to be thrown
- LOG.log(POILogger.ERROR, "can't close input stream", e);
- }
- }
-
- /**
- * Read and process the PropertiesTable and the
- * FAT / XFAT blocks, so that we're ready to
- * work with the file
- */
- private void readCoreContents() throws IOException {
- // Grab the block size
- bigBlockSize = _header.getBigBlockSize();
-
- // Each block should only ever be used by one of the
- // FAT, XFAT or Property Table. Ensure it does
- ChainLoopDetector loopDetector = getChainLoopDetector();
-
- // Read the FAT blocks
- for(int fatAt : _header.getBATArray()) {
- readBAT(fatAt, loopDetector);
- }
-
- // Work out how many FAT blocks remain in the XFATs
- int remainingFATs = _header.getBATCount() - _header.getBATArray().length;
-
- // Now read the XFAT blocks, and the FATs within them
- BATBlock xfat;
- int nextAt = _header.getXBATIndex();
- for(int i=0; i<_header.getXBATCount(); i++) {
- loopDetector.claim(nextAt);
- ByteBuffer fatData = getBlockAt(nextAt);
- xfat = BATBlock.createBATBlock(bigBlockSize, fatData);
- xfat.setOurBlockIndex(nextAt);
- nextAt = xfat.getValueAt(bigBlockSize.getXBATEntriesPerBlock());
- _xbat_blocks.add(xfat);
-
- // Process all the (used) FATs from this XFAT
- int xbatFATs = Math.min(remainingFATs, bigBlockSize.getXBATEntriesPerBlock());
- for(int j=0; j<xbatFATs; j++) {
- int fatAt = xfat.getValueAt(j);
- if(fatAt == POIFSConstants.UNUSED_BLOCK || fatAt == POIFSConstants.END_OF_CHAIN) break;
- readBAT(fatAt, loopDetector);
- }
- remainingFATs -= xbatFATs;
- }
-
- // We're now able to load streams
- // Use this to read in the properties
- _property_table = new NPropertyTable(_header, this);
-
- // Finally read the Small Stream FAT (SBAT) blocks
- BATBlock sfat;
- List<BATBlock> sbats = new ArrayList<>();
- _mini_store = new NPOIFSMiniStore(this, _property_table.getRoot(), sbats, _header);
- nextAt = _header.getSBATStart();
- for(int i=0; i<_header.getSBATCount() && nextAt != POIFSConstants.END_OF_CHAIN; i++) {
- loopDetector.claim(nextAt);
- ByteBuffer fatData = getBlockAt(nextAt);
- sfat = BATBlock.createBATBlock(bigBlockSize, fatData);
- sfat.setOurBlockIndex(nextAt);
- sbats.add(sfat);
- nextAt = getNextBlock(nextAt);
- }
- }
- private void readBAT(int batAt, ChainLoopDetector loopDetector) throws IOException {
- loopDetector.claim(batAt);
- ByteBuffer fatData = getBlockAt(batAt);
- BATBlock bat = BATBlock.createBATBlock(bigBlockSize, fatData);
- bat.setOurBlockIndex(batAt);
- _bat_blocks.add(bat);
- }
- private BATBlock createBAT(int offset, boolean isBAT) throws IOException {
- // Create a new BATBlock
- BATBlock newBAT = BATBlock.createEmptyBATBlock(bigBlockSize, !isBAT);
- newBAT.setOurBlockIndex(offset);
- // Ensure there's a spot in the file for it
- ByteBuffer buffer = ByteBuffer.allocate(bigBlockSize.getBigBlockSize());
- int writeTo = (1+offset) * bigBlockSize.getBigBlockSize(); // Header isn't in BATs
- _data.write(buffer, writeTo);
- // All done
- return newBAT;
- }
-
- /**
- * Load the block at the given offset.
- */
- @Override
- protected ByteBuffer getBlockAt(final int offset) throws IOException {
- // The header block doesn't count, so add one
- long blockWanted = offset + 1L;
- long startAt = blockWanted * bigBlockSize.getBigBlockSize();
- try {
- return _data.read(bigBlockSize.getBigBlockSize(), startAt);
- } catch (IndexOutOfBoundsException e) {
- IndexOutOfBoundsException wrapped = new IndexOutOfBoundsException("Block " + offset + " not found");
- wrapped.initCause(e);
- throw wrapped;
- }
- }
-
- /**
- * Load the block at the given offset,
- * extending the file if needed
- */
- @Override
- protected ByteBuffer createBlockIfNeeded(final int offset) throws IOException {
- try {
- return getBlockAt(offset);
- } catch(IndexOutOfBoundsException e) {
- // The header block doesn't count, so add one
- long startAt = (offset+1L) * bigBlockSize.getBigBlockSize();
- // Allocate and write
- ByteBuffer buffer = ByteBuffer.allocate(getBigBlockSize());
- _data.write(buffer, startAt);
- // Retrieve the properly backed block
- return getBlockAt(offset);
- }
- }
-
- /**
- * Returns the BATBlock that handles the specified offset,
- * and the relative index within it
- */
- @Override
- protected BATBlockAndIndex getBATBlockAndIndex(final int offset) {
- return BATBlock.getBATBlockAndIndex(
- offset, _header, _bat_blocks
- );
- }
-
- /**
- * Works out what block follows the specified one.
- */
- @Override
- protected int getNextBlock(final int offset) {
- BATBlockAndIndex bai = getBATBlockAndIndex(offset);
- return bai.getBlock().getValueAt( bai.getIndex() );
- }
-
- /**
- * Changes the record of what block follows the specified one.
- */
- @Override
- protected void setNextBlock(final int offset, final int nextBlock) {
- BATBlockAndIndex bai = getBATBlockAndIndex(offset);
- bai.getBlock().setValueAt(
- bai.getIndex(), nextBlock
- );
- }
-
- /**
- * Finds a free block, and returns its offset.
- * This method will extend the file if needed, and if doing
- * so, allocate new FAT blocks to address the extra space.
- */
- @Override
- protected int getFreeBlock() throws IOException {
- int numSectors = bigBlockSize.getBATEntriesPerBlock();
-
- // First up, do we have any spare ones?
- int offset = 0;
- for (BATBlock bat : _bat_blocks) {
- if(bat.hasFreeSectors()) {
- // Claim one of them and return it
- for(int j=0; j<numSectors; j++) {
- int batValue = bat.getValueAt(j);
- if(batValue == POIFSConstants.UNUSED_BLOCK) {
- // Bingo
- return offset + j;
- }
- }
- }
-
- // Move onto the next BAT
- offset += numSectors;
- }
-
- // If we get here, then there aren't any free sectors
- // in any of the BATs, so we need another BAT
- BATBlock bat = createBAT(offset, true);
- bat.setValueAt(0, POIFSConstants.FAT_SECTOR_BLOCK);
- _bat_blocks.add(bat);
-
- // Now store a reference to the BAT in the required place
- if(_header.getBATCount() >= 109) {
- // Needs to come from an XBAT
- BATBlock xbat = null;
- for(BATBlock x : _xbat_blocks) {
- if(x.hasFreeSectors()) {
- xbat = x;
- break;
- }
- }
- if(xbat == null) {
- // Oh joy, we need a new XBAT too...
- xbat = createBAT(offset+1, false);
- // Allocate our new BAT as the first block in the XBAT
- xbat.setValueAt(0, offset);
- // And allocate the XBAT in the BAT
- bat.setValueAt(1, POIFSConstants.DIFAT_SECTOR_BLOCK);
-
- // Will go one place higher as XBAT added in
- offset++;
-
- // Chain it
- if(_xbat_blocks.size() == 0) {
- _header.setXBATStart(offset);
- } else {
- _xbat_blocks.get(_xbat_blocks.size()-1).setValueAt(
- bigBlockSize.getXBATEntriesPerBlock(), offset
- );
- }
- _xbat_blocks.add(xbat);
- _header.setXBATCount(_xbat_blocks.size());
- } else {
- // Allocate our BAT in the existing XBAT with space
- for(int i=0; i<bigBlockSize.getXBATEntriesPerBlock(); i++) {
- if(xbat.getValueAt(i) == POIFSConstants.UNUSED_BLOCK) {
- xbat.setValueAt(i, offset);
- break;
- }
- }
- }
- } else {
- // Store us in the header
- int[] newBATs = new int[_header.getBATCount()+1];
- System.arraycopy(_header.getBATArray(), 0, newBATs, 0, newBATs.length-1);
- newBATs[newBATs.length-1] = offset;
- _header.setBATArray(newBATs);
- }
- _header.setBATCount(_bat_blocks.size());
-
- // The current offset stores us, but the next one is free
- return offset+1;
- }
-
- protected long size() throws IOException {
- return _data.size();
- }
-
- @Override
- protected ChainLoopDetector getChainLoopDetector() throws IOException {
- return new ChainLoopDetector(_data.size());
- }
-
- /**
- * For unit testing only! Returns the underlying
- * properties table
- */
- NPropertyTable _get_property_table() {
- return _property_table;
- }
-
- /**
- * Returns the MiniStore, which performs a similar low
- * level function to this, except for the small blocks.
- */
- public NPOIFSMiniStore getMiniStore() {
- return _mini_store;
- }
-
- /**
- * add a new POIFSDocument to the FileSystem
- *
- * @param document the POIFSDocument being added
- */
- void addDocument(final NPOIFSDocument document)
- {
- _property_table.addProperty(document.getDocumentProperty());
- }
-
- /**
- * add a new DirectoryProperty to the FileSystem
- *
- * @param directory the DirectoryProperty being added
- */
- void addDirectory(final DirectoryProperty directory)
- {
- _property_table.addProperty(directory);
- }
-
- /**
- * Create a new document to be added to the root directory
- *
- * @param stream the InputStream from which the document's data
- * will be obtained
- * @param name the name of the new POIFSDocument
- *
- * @return the new DocumentEntry
- *
- * @exception IOException on error creating the new POIFSDocument
- */
-
- public DocumentEntry createDocument(final InputStream stream,
- final String name)
- throws IOException
- {
- return getRoot().createDocument(name, stream);
- }
-
- /**
- * create a new DocumentEntry in the root entry; the data will be
- * provided later
- *
- * @param name the name of the new DocumentEntry
- * @param size the size of the new DocumentEntry
- * @param writer the writer of the new DocumentEntry
- *
- * @return the new DocumentEntry
- *
- * @exception IOException
- */
- public DocumentEntry createDocument(final String name, final int size,
- final POIFSWriterListener writer)
- throws IOException
- {
- return getRoot().createDocument(name, size, writer);
- }
-
- /**
- * create a new DirectoryEntry in the root directory
- *
- * @param name the name of the new DirectoryEntry
- *
- * @return the new DirectoryEntry
- *
- * @exception IOException on name duplication
- */
-
- public DirectoryEntry createDirectory(final String name)
- throws IOException
- {
- return getRoot().createDirectory(name);
- }
-
- /**
- * Set the contents of a document in the root directory,
- * creating if needed, otherwise updating
- *
- * @param stream the InputStream from which the document's data
- * will be obtained
- * @param name the name of the new or existing POIFSDocument
- *
- * @return the new or updated DocumentEntry
- *
- * @exception IOException on error populating the POIFSDocument
- */
-
- public DocumentEntry createOrUpdateDocument(final InputStream stream,
- final String name)
- throws IOException
- {
- return getRoot().createOrUpdateDocument(name, stream);
- }
-
- /**
- * Does the filesystem support an in-place write via
- * {@link #writeFilesystem()} ? If false, only writing out to
- * a brand new file via {@link #writeFilesystem(OutputStream)}
- * is supported.
- */
- public boolean isInPlaceWriteable() {
- if(_data instanceof FileBackedDataSource) {
- if ( ((FileBackedDataSource)_data).isWriteable() ) {
- return true;
- }
- }
- return false;
- }
-
- /**
- * Write the filesystem out to the open file. Will throw an
- * {@link IllegalArgumentException} if opened from an
- * {@link InputStream}.
- *
- * @exception IOException thrown on errors writing to the stream
- */
- public void writeFilesystem() throws IOException {
- if(_data instanceof FileBackedDataSource) {
- // Good, correct type
- } else {
- throw new IllegalArgumentException(
- "POIFS opened from an inputstream, so writeFilesystem() may " +
- "not be called. Use writeFilesystem(OutputStream) instead"
- );
- }
- if (! ((FileBackedDataSource)_data).isWriteable()) {
- throw new IllegalArgumentException(
- "POIFS opened in read only mode, so writeFilesystem() may " +
- "not be called. Open the FileSystem in read-write mode first"
- );
- }
- syncWithDataSource();
- }
-
- /**
- * Write the filesystem out
- *
- * @param stream the OutputStream to which the filesystem will be
- * written
- *
- * @exception IOException thrown on errors writing to the stream
- */
- public void writeFilesystem(final OutputStream stream) throws IOException {
- // Have the datasource updated
- syncWithDataSource();
-
- // Now copy the contents to the stream
- _data.copyTo(stream);
- }
-
- /**
- * Have our in-memory objects write their state
- * to their backing blocks
- */
- private void syncWithDataSource() throws IOException {
- // Mini Stream + SBATs first, as mini-stream details have
- // to be stored in the Root Property
- _mini_store.syncWithDataSource();
-
- // Properties
- NPOIFSStream propStream = new NPOIFSStream(this, _header.getPropertyStart());
- _property_table.preWrite();
- _property_table.write(propStream);
- // _header.setPropertyStart has been updated on write ...
-
- // HeaderBlock
- HeaderBlockWriter hbw = new HeaderBlockWriter(_header);
- hbw.writeBlock( getBlockAt(-1) );
-
- // BATs
- for(BATBlock bat : _bat_blocks) {
- ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
- BlockAllocationTableWriter.writeBlock(bat, block);
- }
- // XBats
- for(BATBlock bat : _xbat_blocks) {
- ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
- BlockAllocationTableWriter.writeBlock(bat, block);
- }
- }
-
- /**
- * Closes the FileSystem, freeing any underlying files, streams
- * and buffers. After this, you will be unable to read or
- * write from the FileSystem.
- */
- public void close() throws IOException {
- _data.close();
- }
-
- /**
- * read in a file and write it back out again
- *
- * @param args names of the files; arg[ 0 ] is the input file,
- * arg[ 1 ] is the output file
- *
- * @exception IOException
- */
- public static void main(String args[]) throws IOException {
- if (args.length != 2) {
- System.err.println(
- "two arguments required: input filename and output filename");
- System.exit(1);
- }
-
- try (FileInputStream istream = new FileInputStream(args[0])) {
- try (FileOutputStream ostream = new FileOutputStream(args[1])) {
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(istream)) {
- fs.writeFilesystem(ostream);
- }
- }
- }
- }
-
- /**
- * Get the root entry
- *
- * @return the root entry
- */
- public DirectoryNode getRoot() {
- if (_root == null) {
- _root = new DirectoryNode(_property_table.getRoot(), this, null);
- }
- return _root;
- }
-
- /**
- * open a document in the root entry's list of entries
- *
- * @param documentName the name of the document to be opened
- *
- * @return a newly opened DocumentInputStream
- *
- * @exception IOException if the document does not exist or the
- * name is that of a DirectoryEntry
- */
- public DocumentInputStream createDocumentInputStream(
- final String documentName) throws IOException {
- return getRoot().createDocumentInputStream(documentName);
- }
-
- /**
- * remove an entry
- *
- * @param entry to be removed
- */
- void remove(EntryNode entry) throws IOException {
- // If it's a document, free the blocks
- if (entry instanceof DocumentEntry) {
- NPOIFSDocument doc = new NPOIFSDocument((DocumentProperty)entry.getProperty(), this);
- doc.free();
- }
-
- // Now zap it from the properties list
- _property_table.removeProperty(entry.getProperty());
- }
-
- /* ********** START begin implementation of POIFSViewable ********** */
-
- /**
- * Get an array of objects, some of which may implement
- * POIFSViewable
- *
- * @return an array of Object; may not be null, but may be empty
- */
- public Object [] getViewableArray() {
- if (preferArray()) {
- return getRoot().getViewableArray();
- }
-
- return new Object[ 0 ];
- }
-
- /**
- * Get an Iterator of objects, some of which may implement
- * POIFSViewable
- *
- * @return an Iterator; may not be null, but may have an empty
- * back end store
- */
-
- public Iterator<Object> getViewableIterator() {
- if (!preferArray()) {
- return getRoot().getViewableIterator();
- }
-
- return Collections.emptyList().iterator();
- }
-
- /**
- * Give viewers a hint as to whether to call getViewableArray or
- * getViewableIterator
- *
- * @return true if a viewer should call getViewableArray, false if
- * a viewer should call getViewableIterator
- */
-
- public boolean preferArray() {
- return getRoot().preferArray();
- }
-
- /**
- * Provides a short description of the object, to be used when a
- * POIFSViewable object has not provided its contents.
- *
- * @return short description
- */
-
- public String getShortDescription() {
- return "POIFS FileSystem";
- }
-
- /* ********** END begin implementation of POIFSViewable ********** */
-
- /**
- * @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
- */
- public int getBigBlockSize() {
- return bigBlockSize.getBigBlockSize();
- }
-
- /**
- * @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
- */
- public POIFSBigBlockSize getBigBlockSizeDetails() {
- return bigBlockSize;
- }
-
- @Override
- protected int getBlockStoreBlockSize() {
- return getBigBlockSize();
- }
-
- @Internal
- public NPropertyTable getPropertyTable() {
- return _property_table;
- }
-
- @Internal
- public HeaderBlock getHeaderBlock() {
- return _header;
- }
-}
-
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.filesystem;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.property.RootProperty;
-import org.apache.poi.poifs.storage.BATBlock;
-import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
-import org.apache.poi.poifs.storage.BlockAllocationTableWriter;
-import org.apache.poi.poifs.storage.HeaderBlock;
-
-/**
- * This class handles the MiniStream (small block store)
- * in the NIO case for {@link NPOIFSFileSystem}
- */
-public class NPOIFSMiniStore extends BlockStore
-{
- private NPOIFSFileSystem _filesystem;
- private NPOIFSStream _mini_stream;
- private List<BATBlock> _sbat_blocks;
- private HeaderBlock _header;
- private RootProperty _root;
-
- protected NPOIFSMiniStore(NPOIFSFileSystem filesystem, RootProperty root,
- List<BATBlock> sbats, HeaderBlock header)
- {
- this._filesystem = filesystem;
- this._sbat_blocks = sbats;
- this._header = header;
- this._root = root;
-
- this._mini_stream = new NPOIFSStream(filesystem, root.getStartBlock());
- }
-
- /**
- * Load the block at the given offset.
- */
- protected ByteBuffer getBlockAt(final int offset) throws IOException {
- // Which big block is this?
- int byteOffset = offset * POIFSConstants.SMALL_BLOCK_SIZE;
- int bigBlockNumber = byteOffset / _filesystem.getBigBlockSize();
- int bigBlockOffset = byteOffset % _filesystem.getBigBlockSize();
-
- // Now locate the data block for it
- Iterator<ByteBuffer> it = _mini_stream.getBlockIterator();
- for(int i=0; i<bigBlockNumber; i++) {
- it.next();
- }
- ByteBuffer dataBlock = it.next();
- if(dataBlock == null) {
- throw new IndexOutOfBoundsException("Big block " + bigBlockNumber + " outside stream");
- }
-
- // Position ourselves, and take a slice
- dataBlock.position(
- dataBlock.position() + bigBlockOffset
- );
- ByteBuffer miniBuffer = dataBlock.slice();
- miniBuffer.limit(POIFSConstants.SMALL_BLOCK_SIZE);
- return miniBuffer;
- }
-
- /**
- * Load the block, extending the underlying stream if needed
- */
- protected ByteBuffer createBlockIfNeeded(final int offset) throws IOException {
- boolean firstInStore = false;
- if (_mini_stream.getStartBlock() == POIFSConstants.END_OF_CHAIN) {
- firstInStore = true;
- }
-
- // Try to get it without extending the stream
- if (! firstInStore) {
- try {
- return getBlockAt(offset);
- } catch(IndexOutOfBoundsException e) {}
- }
-
- // Need to extend the stream
- // TODO Replace this with proper append support
- // For now, do the extending by hand...
-
- // Ask for another block
- int newBigBlock = _filesystem.getFreeBlock();
- _filesystem.createBlockIfNeeded(newBigBlock);
-
- // If we are the first block to be allocated, initialise the stream
- if (firstInStore) {
- _filesystem._get_property_table().getRoot().setStartBlock(newBigBlock);
- _mini_stream = new NPOIFSStream(_filesystem, newBigBlock);
- } else {
- // Tack it onto the end of our chain
- ChainLoopDetector loopDetector = _filesystem.getChainLoopDetector();
- int block = _mini_stream.getStartBlock();
- while(true) {
- loopDetector.claim(block);
- int next = _filesystem.getNextBlock(block);
- if(next == POIFSConstants.END_OF_CHAIN) {
- break;
- }
- block = next;
- }
- _filesystem.setNextBlock(block, newBigBlock);
- }
-
- // This is now the new end
- _filesystem.setNextBlock(newBigBlock, POIFSConstants.END_OF_CHAIN);
-
- // Now try again, to get the real small block
- return createBlockIfNeeded(offset);
- }
-
- /**
- * Returns the BATBlock that handles the specified offset,
- * and the relative index within it
- */
- protected BATBlockAndIndex getBATBlockAndIndex(final int offset) {
- return BATBlock.getSBATBlockAndIndex(
- offset, _header, _sbat_blocks
- );
- }
-
- /**
- * Works out what block follows the specified one.
- */
- protected int getNextBlock(final int offset) {
- BATBlockAndIndex bai = getBATBlockAndIndex(offset);
- return bai.getBlock().getValueAt( bai.getIndex() );
- }
-
- /**
- * Changes the record of what block follows the specified one.
- */
- protected void setNextBlock(final int offset, final int nextBlock) {
- BATBlockAndIndex bai = getBATBlockAndIndex(offset);
- bai.getBlock().setValueAt(
- bai.getIndex(), nextBlock
- );
- }
-
- /**
- * Finds a free block, and returns its offset.
- * This method will extend the file if needed, and if doing
- * so, allocate new FAT blocks to address the extra space.
- */
- protected int getFreeBlock() throws IOException {
- int sectorsPerSBAT = _filesystem.getBigBlockSizeDetails().getBATEntriesPerBlock();
-
- // First up, do we have any spare ones?
- int offset = 0;
- for (BATBlock sbat : _sbat_blocks) {
- // Check this one
- if (sbat.hasFreeSectors()) {
- // Claim one of them and return it
- for (int j = 0; j < sectorsPerSBAT; j++) {
- int sbatValue = sbat.getValueAt(j);
- if (sbatValue == POIFSConstants.UNUSED_BLOCK) {
- // Bingo
- return offset + j;
- }
- }
- }
-
- // Move onto the next SBAT
- offset += sectorsPerSBAT;
- }
-
- // If we get here, then there aren't any
- // free sectors in any of the SBATs
- // So, we need to extend the chain and add another
-
- // Create a new BATBlock
- BATBlock newSBAT = BATBlock.createEmptyBATBlock(_filesystem.getBigBlockSizeDetails(), false);
- int batForSBAT = _filesystem.getFreeBlock();
- newSBAT.setOurBlockIndex(batForSBAT);
-
- // Are we the first SBAT?
- if(_header.getSBATCount() == 0) {
- // Tell the header that we've got our first SBAT there
- _header.setSBATStart(batForSBAT);
- _header.setSBATBlockCount(1);
- } else {
- // Find the end of the SBAT stream, and add the sbat in there
- ChainLoopDetector loopDetector = _filesystem.getChainLoopDetector();
- int batOffset = _header.getSBATStart();
- while(true) {
- loopDetector.claim(batOffset);
- int nextBat = _filesystem.getNextBlock(batOffset);
- if(nextBat == POIFSConstants.END_OF_CHAIN) {
- break;
- }
- batOffset = nextBat;
- }
-
- // Add it in at the end
- _filesystem.setNextBlock(batOffset, batForSBAT);
-
- // And update the count
- _header.setSBATBlockCount(
- _header.getSBATCount() + 1
- );
- }
-
- // Finish allocating
- _filesystem.setNextBlock(batForSBAT, POIFSConstants.END_OF_CHAIN);
- _sbat_blocks.add(newSBAT);
-
- // Return our first spot
- return offset;
- }
-
- @Override
- protected ChainLoopDetector getChainLoopDetector() throws IOException {
- return new ChainLoopDetector( _root.getSize() );
- }
-
- protected int getBlockStoreBlockSize() {
- return POIFSConstants.SMALL_BLOCK_SIZE;
- }
-
- /**
- * Writes the SBATs to their backing blocks, and updates
- * the mini-stream size in the properties. Stream size is
- * based on full blocks used, not the data within the streams
- */
- protected void syncWithDataSource() throws IOException {
- int blocksUsed = 0;
- for (BATBlock sbat : _sbat_blocks) {
- ByteBuffer block = _filesystem.getBlockAt(sbat.getOurBlockIndex());
- BlockAllocationTableWriter.writeBlock(sbat, block);
-
- if (!sbat.hasFreeSectors()) {
- blocksUsed += _filesystem.getBigBlockSizeDetails().getBATEntriesPerBlock();
- } else {
- blocksUsed += sbat.getUsedSectors(false);
- }
- }
- // Set the size on the root in terms of the number of SBAT blocks
- // RootProperty.setSize does the sbat -> bytes conversion for us
- _filesystem._get_property_table().getRoot().setSize(blocksUsed);
- }
-}
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.filesystem;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.filesystem.BlockStore.ChainLoopDetector;
-import org.apache.poi.poifs.property.Property;
-import org.apache.poi.poifs.storage.HeaderBlock;
-
-/**
- * This handles reading and writing a stream within a
- * {@link NPOIFSFileSystem}. It can supply an iterator
- * to read blocks, and way to write out to existing and
- * new blocks.
- * Most users will want a higher level version of this,
- * which deals with properties to track which stream
- * this is.
- * This only works on big block streams, it doesn't
- * handle small block ones.
- * This uses the new NIO code
- *
- * TODO Implement a streaming write method, and append
- */
-
-public class NPOIFSStream implements Iterable<ByteBuffer>
-{
- private BlockStore blockStore;
- private int startBlock;
- private OutputStream outStream;
-
- /**
- * Constructor for an existing stream. It's up to you
- * to know how to get the start block (eg from a
- * {@link HeaderBlock} or a {@link Property})
- */
- public NPOIFSStream(BlockStore blockStore, int startBlock) {
- this.blockStore = blockStore;
- this.startBlock = startBlock;
- }
-
- /**
- * Constructor for a new stream. A start block won't
- * be allocated until you begin writing to it.
- */
- public NPOIFSStream(BlockStore blockStore) {
- this.blockStore = blockStore;
- this.startBlock = POIFSConstants.END_OF_CHAIN;
- }
-
- /**
- * What block does this stream start at?
- * Will be {@link POIFSConstants#END_OF_CHAIN} for a
- * new stream that hasn't been written to yet.
- */
- public int getStartBlock() {
- return startBlock;
- }
-
- /**
- * Returns an iterator that'll supply one {@link ByteBuffer}
- * per block in the stream.
- */
- public Iterator<ByteBuffer> iterator() {
- return getBlockIterator();
- }
-
- public Iterator<ByteBuffer> getBlockIterator() {
- if(startBlock == POIFSConstants.END_OF_CHAIN) {
- throw new IllegalStateException(
- "Can't read from a new stream before it has been written to"
- );
- }
- return new StreamBlockByteBufferIterator(startBlock);
- }
-
- /**
- * Updates the contents of the stream to the new
- * set of bytes.
- * Note - if this is property based, you'll still
- * need to update the size in the property yourself
- */
- public void updateContents(byte[] contents) throws IOException {
- OutputStream os = getOutputStream();
- os.write(contents);
- os.close();
- }
-
- public OutputStream getOutputStream() throws IOException {
- if (outStream == null) {
- outStream = new StreamBlockByteBuffer();
- }
- return outStream;
- }
-
- // TODO Streaming write support
- // TODO then convert fixed sized write to use streaming internally
- // TODO Append write support (probably streaming)
-
- /**
- * Frees all blocks in the stream
- */
- public void free() throws IOException {
- ChainLoopDetector loopDetector = blockStore.getChainLoopDetector();
- free(loopDetector);
- }
- private void free(ChainLoopDetector loopDetector) {
- int nextBlock = startBlock;
- while(nextBlock != POIFSConstants.END_OF_CHAIN) {
- int thisBlock = nextBlock;
- loopDetector.claim(thisBlock);
- nextBlock = blockStore.getNextBlock(thisBlock);
- blockStore.setNextBlock(thisBlock, POIFSConstants.UNUSED_BLOCK);
- }
- this.startBlock = POIFSConstants.END_OF_CHAIN;
- }
-
- /**
- * Class that handles a streaming read of one stream
- */
- protected class StreamBlockByteBufferIterator implements Iterator<ByteBuffer> {
- private ChainLoopDetector loopDetector;
- private int nextBlock;
-
- protected StreamBlockByteBufferIterator(int firstBlock) {
- this.nextBlock = firstBlock;
- try {
- this.loopDetector = blockStore.getChainLoopDetector();
- } catch(IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- public boolean hasNext() {
- if(nextBlock == POIFSConstants.END_OF_CHAIN) {
- return false;
- }
- return true;
- }
-
- public ByteBuffer next() {
- if(nextBlock == POIFSConstants.END_OF_CHAIN) {
- throw new IndexOutOfBoundsException("Can't read past the end of the stream");
- }
-
- try {
- loopDetector.claim(nextBlock);
- ByteBuffer data = blockStore.getBlockAt(nextBlock);
- nextBlock = blockStore.getNextBlock(nextBlock);
- return data;
- } catch(IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- public void remove() {
- throw new UnsupportedOperationException();
- }
- }
-
- protected class StreamBlockByteBuffer extends OutputStream {
- byte oneByte[] = new byte[1];
- ByteBuffer buffer;
- // Make sure we don't encounter a loop whilst overwriting
- // the existing blocks
- ChainLoopDetector loopDetector;
- int prevBlock, nextBlock;
-
- protected StreamBlockByteBuffer() throws IOException {
- loopDetector = blockStore.getChainLoopDetector();
- prevBlock = POIFSConstants.END_OF_CHAIN;
- nextBlock = startBlock;
- }
-
- protected void createBlockIfNeeded() throws IOException {
- if (buffer != null && buffer.hasRemaining()) return;
-
- int thisBlock = nextBlock;
-
- // Allocate a block if needed, otherwise figure
- // out what the next block will be
- if(thisBlock == POIFSConstants.END_OF_CHAIN) {
- thisBlock = blockStore.getFreeBlock();
- loopDetector.claim(thisBlock);
-
- // We're on the end of the chain
- nextBlock = POIFSConstants.END_OF_CHAIN;
-
- // Mark the previous block as carrying on to us if needed
- if(prevBlock != POIFSConstants.END_OF_CHAIN) {
- blockStore.setNextBlock(prevBlock, thisBlock);
- }
- blockStore.setNextBlock(thisBlock, POIFSConstants.END_OF_CHAIN);
-
- // If we've just written the first block on a
- // new stream, save the start block offset
- if(startBlock == POIFSConstants.END_OF_CHAIN) {
- startBlock = thisBlock;
- }
- } else {
- loopDetector.claim(thisBlock);
- nextBlock = blockStore.getNextBlock(thisBlock);
- }
-
- buffer = blockStore.createBlockIfNeeded(thisBlock);
-
- // Update pointers
- prevBlock = thisBlock;
- }
-
- public void write(int b) throws IOException {
- oneByte[0] = (byte)(b & 0xFF);
- write(oneByte);
- }
-
- public void write(byte[] b, int off, int len) throws IOException {
- if ((off < 0) || (off > b.length) || (len < 0) ||
- ((off + len) > b.length) || ((off + len) < 0)) {
- throw new IndexOutOfBoundsException();
- } else if (len == 0) {
- return;
- }
-
- do {
- createBlockIfNeeded();
- int writeBytes = Math.min(buffer.remaining(), len);
- buffer.put(b, off, writeBytes);
- off += writeBytes;
- len -= writeBytes;
- } while (len > 0);
- }
-
- public void close() throws IOException {
- // If we're overwriting, free any remaining blocks
- NPOIFSStream toFree = new NPOIFSStream(blockStore, nextBlock);
- toFree.free(loopDetector);
-
- // Mark the end of the stream, if we have any data
- if (prevBlock != POIFSConstants.END_OF_CHAIN) {
- blockStore.setNextBlock(prevBlock, POIFSConstants.END_OF_CHAIN);
- }
- }
- }
-}
-
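/*
 * Editor's note: an illustrative sketch, not part of the patch. It assumes the renamed
 * POIFSStream keeps the surface of the NPOIFSStream removed above (a BlockStore-based
 * constructor, updateContents() and getBlockIterator()), which is how the new
 * POIFSDocument and POIFSFileSystem code later in this patch drives it. This is
 * low-level plumbing; normal callers read and write documents via DocumentEntry.
 */
import java.nio.ByteBuffer;
import java.util.Iterator;

import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSStream;

public class StreamChainSketch {
    public static void main(String[] args) throws Exception {
        try (POIFSFileSystem fs = new POIFSFileSystem()) {
            // A brand new stream has no start block until the first write
            POIFSStream stream = new POIFSStream(fs);

            // Writing allocates big blocks and chains them together in the FAT
            byte[] data = new byte[2048];   // four 512-byte big blocks
            stream.updateContents(data);

            // Reading walks that chain, one block-sized ByteBuffer at a time,
            // until the FAT entry reports END_OF_CHAIN
            int blocks = 0;
            Iterator<ByteBuffer> it = stream.getBlockIterator();
            while (it.hasNext()) {
                it.next();
                blocks++;
            }
            // blocks should now be 4 (2048 bytes / 512-byte big blocks)
        }
    }
}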
--- /dev/null
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+package org.apache.poi.poifs.filesystem;
+
+import static java.util.Collections.emptyList;
+
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+
+import org.apache.poi.poifs.common.POIFSConstants;
+import org.apache.poi.poifs.dev.POIFSViewable;
+import org.apache.poi.poifs.property.DocumentProperty;
+import org.apache.poi.util.HexDump;
+import org.apache.poi.util.IOUtils;
+
+/**
+ * This class manages a document in the NIO POIFS filesystem.
+ * This is the {@link POIFSFileSystem} version.
+ */
+public final class POIFSDocument implements POIFSViewable, Iterable<ByteBuffer> {
+
+ //arbitrarily selected; may need to increase
+ private static final int MAX_RECORD_LENGTH = 100_000;
+
+ private DocumentProperty _property;
+
+ private POIFSFileSystem _filesystem;
+ private POIFSStream _stream;
+ private int _block_size;
+
+ /**
+ * Constructor for an existing Document
+ */
+ public POIFSDocument(DocumentNode document) {
+ this((DocumentProperty)document.getProperty(),
+ ((DirectoryNode)document.getParent()).getNFileSystem());
+ }
+
+ /**
+ * Constructor for an existing Document
+ */
+ public POIFSDocument(DocumentProperty property, POIFSFileSystem filesystem) {
+ this._property = property;
+ this._filesystem = filesystem;
+
+ if(property.getSize() < POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
+ _stream = new POIFSStream(_filesystem.getMiniStore(), property.getStartBlock());
+ _block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
+ } else {
+ _stream = new POIFSStream(_filesystem, property.getStartBlock());
+ _block_size = _filesystem.getBlockStoreBlockSize();
+ }
+ }
+
+ /**
+ * Constructor for a new Document
+ *
+ * @param name the name of the POIFSDocument
+ * @param stream the InputStream we read data from
+ */
+ public POIFSDocument(String name, POIFSFileSystem filesystem, InputStream stream)
+ throws IOException
+ {
+ this._filesystem = filesystem;
+
+ // Store it
+ int length = store(stream);
+
+ // Build the property for it
+ this._property = new DocumentProperty(name, length);
+ _property.setStartBlock(_stream.getStartBlock());
+ _property.setDocument(this);
+ }
+
+ public POIFSDocument(String name, final int size, POIFSFileSystem filesystem, POIFSWriterListener writer)
+ throws IOException
+ {
+ this._filesystem = filesystem;
+
+ if (size < POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
+ _stream = new POIFSStream(filesystem.getMiniStore());
+ _block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
+ } else {
+ _stream = new POIFSStream(filesystem);
+ _block_size = _filesystem.getBlockStoreBlockSize();
+ }
+
+ this._property = new DocumentProperty(name, size);
+ _property.setStartBlock(_stream.getStartBlock());
+ _property.setDocument(this);
+
+ try (DocumentOutputStream os = new DocumentOutputStream(this, size)) {
+ POIFSDocumentPath path = new POIFSDocumentPath(name.split("\\\\"));
+ String docName = path.getComponent(path.length() - 1);
+ POIFSWriterEvent event = new POIFSWriterEvent(os, path, docName, size);
+ writer.processPOIFSWriterEvent(event);
+ }
+ }
+
+ /**
+ * Stores the given data for this Document
+ */
+ private int store(InputStream stream) throws IOException {
+ final int bigBlockSize = POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE;
+ BufferedInputStream bis = new BufferedInputStream(stream, bigBlockSize+1);
+ bis.mark(bigBlockSize);
+
+ // Do we need to store as a mini stream or a full one?
+ long streamBlockSize = IOUtils.skipFully(bis, bigBlockSize);
+ if (streamBlockSize < bigBlockSize) {
+ _stream = new POIFSStream(_filesystem.getMiniStore());
+ _block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
+ } else {
+ _stream = new POIFSStream(_filesystem);
+ _block_size = _filesystem.getBlockStoreBlockSize();
+ }
+
+ // start from the beginning
+ bis.reset();
+
+ // Store it
+ final long length;
+ try (OutputStream os = _stream.getOutputStream()) {
+ length = IOUtils.copy(bis, os);
+
+ // Pad to the end of the block with -1s
+ int usedInBlock = (int) (length % _block_size);
+ if (usedInBlock != 0 && usedInBlock != _block_size) {
+ int toBlockEnd = _block_size - usedInBlock;
+ byte[] padding = IOUtils.safelyAllocate(toBlockEnd, MAX_RECORD_LENGTH);
+ Arrays.fill(padding, (byte) 0xFF);
+ os.write(padding);
+ }
+ }
+
+ return (int)length;
+ }
+
+ /**
+ * Frees the underlying stream and property
+ */
+ void free() throws IOException {
+ _stream.free();
+ _property.setStartBlock(POIFSConstants.END_OF_CHAIN);
+ }
+
+ POIFSFileSystem getFileSystem()
+ {
+ return _filesystem;
+ }
+
+ int getDocumentBlockSize() {
+ return _block_size;
+ }
+
+ @Override
+ public Iterator<ByteBuffer> iterator() {
+ return getBlockIterator();
+ }
+
+ Iterator<ByteBuffer> getBlockIterator() {
+ return (getSize() > 0 ? _stream : Collections.<ByteBuffer>emptyList()).iterator();
+ }
+
+ /**
+ * @return size of the document
+ */
+ public int getSize() {
+ return _property.getSize();
+ }
+
+ public void replaceContents(InputStream stream) throws IOException {
+ free();
+ int size = store(stream);
+ _property.setStartBlock(_stream.getStartBlock());
+ _property.updateSize(size);
+ }
+
+ /**
+ * @return the instance's DocumentProperty
+ */
+ DocumentProperty getDocumentProperty() {
+ return _property;
+ }
+
+ /**
+ * Get an array of objects, some of which may implement POIFSViewable
+ *
+ * @return an array of Object; may not be null, but may be empty
+ */
+ public Object[] getViewableArray() {
+ String result = "<NO DATA>";
+
+ if(getSize() > 0) {
+ // Get all the data into a single array
+ byte[] data = IOUtils.safelyAllocate(getSize(), MAX_RECORD_LENGTH);
+ int offset = 0;
+ for(ByteBuffer buffer : _stream) {
+ int length = Math.min(_block_size, data.length-offset);
+ buffer.get(data, offset, length);
+ offset += length;
+ }
+
+ result = HexDump.dump(data, 0, 0);
+ }
+
+ return new String[]{ result };
+ }
+
+ /**
+ * Get an Iterator of objects, some of which may implement POIFSViewable
+ *
+ * @return an Iterator; may not be null, but may have an empty back end
+ * store
+ */
+ public Iterator<Object> getViewableIterator() {
+ return emptyList().iterator();
+ }
+
+ /**
+ * Give viewers a hint as to whether to call getViewableArray or
+ * getViewableIterator
+ *
+ * @return <code>true</code> if a viewer should call getViewableArray,
+ * <code>false</code> if a viewer should call getViewableIterator
+ */
+ public boolean preferArray() {
+ return true;
+ }
+
+ /**
+ * Provides a short description of the object, to be used when a
+ * POIFSViewable object has not provided its contents.
+ *
+ * @return short description
+ */
+ public String getShortDescription() {
+
+ return "Document: \"" + _property.getName() + "\" size = " + getSize();
+ }
+}
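/*
 * Editor's note: a minimal usage sketch, not part of the patch, showing how the renamed
 * POIFSDocument above is normally driven through the public API (createDocument and
 * createDocumentInputStream on the filesystem/root). Documents smaller than
 * BIG_BLOCK_MINIMUM_DOCUMENT_SIZE end up in the mini stream, larger ones in the regular
 * block stream; callers never choose. Entry and file names below are illustrative only.
 */
import java.io.ByteArrayInputStream;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class POIFSDocumentSketch {
    public static void main(String[] args) throws Exception {
        byte[] payload = "hello poifs".getBytes(StandardCharsets.UTF_8);

        try (POIFSFileSystem fs = new POIFSFileSystem()) {
            // Small payload, so the backing POIFSDocument lands in the mini stream
            fs.createDocument(new ByteArrayInputStream(payload), "SmallEntry");

            // Read it straight back through the root entry
            try (DocumentInputStream dis = fs.createDocumentInputStream("SmallEntry")) {
                byte[] roundTripped = new byte[payload.length];
                dis.readFully(roundTripped);
            }

            // Persist the whole filesystem to a new file
            try (OutputStream out = new FileOutputStream("sketch.ole2")) {
                fs.writeFilesystem(out);
            }
        }
    }
}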
package org.apache.poi.poifs.filesystem;
-import java.io.*;
+import java.io.ByteArrayOutputStream;
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+import java.nio.channels.FileChannel;
+import java.nio.channels.ReadableByteChannel;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import org.apache.poi.EmptyFileException;
+import org.apache.poi.poifs.common.POIFSBigBlockSize;
+import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.dev.POIFSViewable;
-import org.apache.poi.util.CloseIgnoringInputStream;
+import org.apache.poi.poifs.nio.ByteArrayBackedDataSource;
+import org.apache.poi.poifs.nio.DataSource;
+import org.apache.poi.poifs.nio.FileBackedDataSource;
+import org.apache.poi.poifs.property.DirectoryProperty;
+import org.apache.poi.poifs.property.DocumentProperty;
+import org.apache.poi.poifs.property.PropertyTable;
+import org.apache.poi.poifs.storage.BATBlock;
+import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
+import org.apache.poi.poifs.storage.HeaderBlock;
+import org.apache.poi.util.IOUtils;
+import org.apache.poi.util.Internal;
+import org.apache.poi.util.POILogFactory;
+import org.apache.poi.util.POILogger;
/**
- * Transition class for the move from {@link POIFSFileSystem} to
- * {@link OPOIFSFileSystem}, and from {@link NPOIFSFileSystem} to
- * {@link POIFSFileSystem}.
- * <p>This has been updated to be powered by the NIO-based NPOIFS
- * {@link NPOIFSFileSystem}.
+ * <p>This is the main class of the POIFS system; it manages the entire
+ * life cycle of the filesystem.</p>
+ * <p>This is the new NIO version, which uses less memory</p>
*/
-public class POIFSFileSystem
- extends NPOIFSFileSystem // TODO Temporary workaround during #56791
- implements POIFSViewable {
+
+public class POIFSFileSystem extends BlockStore
+ implements POIFSViewable, Closeable
+{
+ //arbitrarily selected; may need to increase
+ private static final int MAX_RECORD_LENGTH = 100_000;
+
+ private static final POILogger LOG = POILogFactory.getLogger(POIFSFileSystem.class);
+
/**
- * Convenience method for clients that want to avoid the auto-close behaviour of the constructor.
+     * Maximum size (in blocks) of the allocation table supported by
+ * POI.<p>
+ *
+ * This constant has been chosen to help POI identify corrupted data in the
+ * header block (rather than crash immediately with {@link OutOfMemoryError}
+ * ). It's not clear if the compound document format actually specifies any
+ * upper limits. For files with 512 byte blocks, having an allocation table
+     * of 65,535 blocks would correspond to a total file size of 4GB. Needless
+ * to say, POI probably cannot handle files anywhere near that size.
*/
- public static InputStream createNonClosingInputStream(InputStream is) {
- return new CloseIgnoringInputStream(is);
- }
+ private static final int MAX_BLOCK_COUNT = 65535;
+ private POIFSMiniStore _mini_store;
+ private PropertyTable _property_table;
+ private List<BATBlock> _xbat_blocks;
+ private List<BATBlock> _bat_blocks;
+ private HeaderBlock _header;
+ private DirectoryNode _root;
+
+ private DataSource _data;
+
+ /**
+ * What big block size the file uses. Most files
+ * use 512 bytes, but a few use 4096
+ */
+ private POIFSBigBlockSize bigBlockSize =
+ POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;
+
+ private POIFSFileSystem(boolean newFS)
+ {
+ _header = new HeaderBlock(bigBlockSize);
+ _property_table = new PropertyTable(_header);
+ _mini_store = new POIFSMiniStore(this, _property_table.getRoot(), new ArrayList<>(), _header);
+ _xbat_blocks = new ArrayList<>();
+ _bat_blocks = new ArrayList<>();
+ _root = null;
+
+ if(newFS) {
+ // Data needs to initially hold just the header block,
+ // a single bat block, and an empty properties section
+ _data = new ByteArrayBackedDataSource(IOUtils.safelyAllocate(
+ bigBlockSize.getBigBlockSize()*3, MAX_RECORD_LENGTH));
+ }
+ }
+
/**
* Constructor, intended for writing
*/
- public POIFSFileSystem() {
- super();
+ public POIFSFileSystem()
+ {
+ this(true);
+
+ // Reserve block 0 for the start of the Properties Table
+        // Create a single empty BAT, and put that at offset 1
+ _header.setBATCount(1);
+ _header.setBATArray(new int[] { 1 });
+ BATBlock bb = BATBlock.createEmptyBATBlock(bigBlockSize, false);
+ bb.setOurBlockIndex(1);
+ _bat_blocks.add(bb);
+
+ setNextBlock(0, POIFSConstants.END_OF_CHAIN);
+ setNextBlock(1, POIFSConstants.FAT_SECTOR_BLOCK);
+
+ _property_table.setStartBlock(0);
}
+ /**
+ * <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
+ * creating from an <tt>InputStream</tt>. The File will be opened read-only</p>
+ *
+ * <p>Note that with this constructor, you will need to call {@link #close()}
+ * when you're done to have the underlying file closed, as the file is
+ * kept open during normal operation to read the data out.</p>
+ *
+ * @param file the File from which to read the data
+ *
+ * @exception IOException on errors reading, or on invalid data
+ */
+ public POIFSFileSystem(File file)
+ throws IOException
+ {
+ this(file, true);
+ }
+
+ /**
+ * <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
+ * creating from an <tt>InputStream</tt>.</p>
+ *
+ * <p>Note that with this constructor, you will need to call {@link #close()}
+ * when you're done to have the underlying file closed, as the file is
+ * kept open during normal operation to read the data out.</p>
+ *
+ * @param file the File from which to read or read/write the data
+     * @param readOnly whether the POIFSFileSystem will only be used in read-only mode
+ *
+ * @exception IOException on errors reading, or on invalid data
+ */
+ public POIFSFileSystem(File file, boolean readOnly)
+ throws IOException
+ {
+ this(null, file, readOnly, true);
+ }
+
+ /**
+ * <p>Creates a POIFSFileSystem from an open <tt>FileChannel</tt>. This uses
+ * less memory than creating from an <tt>InputStream</tt>. The stream will
+ * be used in read-only mode.</p>
+ *
+ * <p>Note that with this constructor, you will need to call {@link #close()}
+ * when you're done to have the underlying Channel closed, as the channel is
+ * kept open during normal operation to read the data out.</p>
+ *
+ * @param channel the FileChannel from which to read the data
+ *
+ * @exception IOException on errors reading, or on invalid data
+ */
+ public POIFSFileSystem(FileChannel channel)
+ throws IOException
+ {
+ this(channel, true);
+ }
+
+ /**
+ * <p>Creates a POIFSFileSystem from an open <tt>FileChannel</tt>. This uses
+ * less memory than creating from an <tt>InputStream</tt>.</p>
+ *
+ * <p>Note that with this constructor, you will need to call {@link #close()}
+ * when you're done to have the underlying Channel closed, as the channel is
+ * kept open during normal operation to read the data out.</p>
+ *
+ * @param channel the FileChannel from which to read or read/write the data
+     * @param readOnly whether the POIFSFileSystem will only be used in read-only mode
+ *
+ * @exception IOException on errors reading, or on invalid data
+ */
+ public POIFSFileSystem(FileChannel channel, boolean readOnly)
+ throws IOException
+ {
+ this(channel, null, readOnly, false);
+ }
+
+ private POIFSFileSystem(FileChannel channel, File srcFile, boolean readOnly, boolean closeChannelOnError)
+ throws IOException
+ {
+ this(false);
+
+ try {
+ // Initialize the datasource
+ if (srcFile != null) {
+ if (srcFile.length() == 0)
+ throw new EmptyFileException();
+
+ FileBackedDataSource d = new FileBackedDataSource(srcFile, readOnly);
+ channel = d.getChannel();
+ _data = d;
+ } else {
+ _data = new FileBackedDataSource(channel, readOnly);
+ }
+
+ // Get the header
+ ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
+ IOUtils.readFully(channel, headerBuffer);
+
+ // Have the header processed
+ _header = new HeaderBlock(headerBuffer);
+
+ // Now process the various entries
+ readCoreContents();
+ } catch(IOException | RuntimeException e) {
+ // Comes from Iterators etc.
+ // TODO Decide if we can handle these better whilst
+ // still sticking to the iterator contract
+ if (closeChannelOnError && channel != null) {
+ channel.close();
+ }
+ throw e;
+ }
+ }
+
/**
* Create a POIFSFileSystem from an <tt>InputStream</tt>. Normally the stream is read until
* EOF. The stream is always closed.<p>
* @exception IOException on errors reading, or on invalid data
*/
- public POIFSFileSystem(InputStream stream) throws IOException {
- super(stream);
+ public POIFSFileSystem(InputStream stream)
+ throws IOException
+ {
+ this(false);
+
+ boolean success = false;
+ try (ReadableByteChannel channel = Channels.newChannel(stream)) {
+ // Turn our InputStream into something NIO based
+
+ // Get the header
+ ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
+ IOUtils.readFully(channel, headerBuffer);
+
+ // Have the header processed
+ _header = new HeaderBlock(headerBuffer);
+
+ // Sanity check the block count
+ sanityCheckBlockCount(_header.getBATCount());
+
+ // We need to buffer the whole file into memory when
+ // working with an InputStream.
+ // The max possible size is when each BAT block entry is used
+ long maxSize = BATBlock.calculateMaximumSize(_header);
+ if (maxSize > Integer.MAX_VALUE) {
+                throw new IllegalArgumentException("Unable to read a >2gb file via an InputStream");
+ }
+ ByteBuffer data = ByteBuffer.allocate((int) maxSize);
+
+ // Copy in the header
+ headerBuffer.position(0);
+ data.put(headerBuffer);
+ data.position(headerBuffer.capacity());
+
+ // Now read the rest of the stream
+ IOUtils.readFully(channel, data);
+ success = true;
+
+ // Turn it into a DataSource
+ _data = new ByteArrayBackedDataSource(data.array(), data.position());
+ } finally {
+ // As per the constructor contract, always close the stream
+ closeInputStream(stream, success);
+ }
+
+ // Now process the various entries
+ readCoreContents();
+ }
+ /**
+ * @param stream the stream to be closed
+ * @param success <code>false</code> if an exception is currently being thrown in the calling method
+ */
+ private void closeInputStream(InputStream stream, boolean success) {
+ try {
+ stream.close();
+ } catch (IOException e) {
+ if(success) {
+ throw new RuntimeException(e);
+ }
+            // else not success? The try block did not complete normally -
+            // just log it and let the original exception be thrown
+ LOG.log(POILogger.ERROR, "can't close input stream", e);
+ }
+ }
+
+ /**
+ * Read and process the PropertiesTable and the
+ * FAT / XFAT blocks, so that we're ready to
+ * work with the file
+ */
+ private void readCoreContents() throws IOException {
+ // Grab the block size
+ bigBlockSize = _header.getBigBlockSize();
+
+ // Each block should only ever be used by one of the
+ // FAT, XFAT or Property Table. Ensure it does
+ ChainLoopDetector loopDetector = getChainLoopDetector();
+
+ // Read the FAT blocks
+ for(int fatAt : _header.getBATArray()) {
+ readBAT(fatAt, loopDetector);
+ }
+
+ // Work out how many FAT blocks remain in the XFATs
+ int remainingFATs = _header.getBATCount() - _header.getBATArray().length;
+
+ // Now read the XFAT blocks, and the FATs within them
+ BATBlock xfat;
+ int nextAt = _header.getXBATIndex();
+ for(int i=0; i<_header.getXBATCount(); i++) {
+ loopDetector.claim(nextAt);
+ ByteBuffer fatData = getBlockAt(nextAt);
+ xfat = BATBlock.createBATBlock(bigBlockSize, fatData);
+ xfat.setOurBlockIndex(nextAt);
+ nextAt = xfat.getValueAt(bigBlockSize.getXBATEntriesPerBlock());
+ _xbat_blocks.add(xfat);
+
+ // Process all the (used) FATs from this XFAT
+ int xbatFATs = Math.min(remainingFATs, bigBlockSize.getXBATEntriesPerBlock());
+ for(int j=0; j<xbatFATs; j++) {
+ int fatAt = xfat.getValueAt(j);
+ if(fatAt == POIFSConstants.UNUSED_BLOCK || fatAt == POIFSConstants.END_OF_CHAIN) break;
+ readBAT(fatAt, loopDetector);
+ }
+ remainingFATs -= xbatFATs;
+ }
+
+        // We're now able to load streams
+ // Use this to read in the properties
+ _property_table = new PropertyTable(_header, this);
+
+ // Finally read the Small Stream FAT (SBAT) blocks
+ BATBlock sfat;
+ List<BATBlock> sbats = new ArrayList<>();
+ _mini_store = new POIFSMiniStore(this, _property_table.getRoot(), sbats, _header);
+ nextAt = _header.getSBATStart();
+ for(int i=0; i<_header.getSBATCount() && nextAt != POIFSConstants.END_OF_CHAIN; i++) {
+ loopDetector.claim(nextAt);
+ ByteBuffer fatData = getBlockAt(nextAt);
+ sfat = BATBlock.createBATBlock(bigBlockSize, fatData);
+ sfat.setOurBlockIndex(nextAt);
+ sbats.add(sfat);
+ nextAt = getNextBlock(nextAt);
+ }
+ }
+ private void readBAT(int batAt, ChainLoopDetector loopDetector) throws IOException {
+ loopDetector.claim(batAt);
+ ByteBuffer fatData = getBlockAt(batAt);
+ BATBlock bat = BATBlock.createBATBlock(bigBlockSize, fatData);
+ bat.setOurBlockIndex(batAt);
+ _bat_blocks.add(bat);
+ }
+ private BATBlock createBAT(int offset, boolean isBAT) throws IOException {
+ // Create a new BATBlock
+ BATBlock newBAT = BATBlock.createEmptyBATBlock(bigBlockSize, !isBAT);
+ newBAT.setOurBlockIndex(offset);
+ // Ensure there's a spot in the file for it
+ ByteBuffer buffer = ByteBuffer.allocate(bigBlockSize.getBigBlockSize());
+ int writeTo = (1+offset) * bigBlockSize.getBigBlockSize(); // Header isn't in BATs
+ _data.write(buffer, writeTo);
+ // All done
+ return newBAT;
+ }
+
+ /**
+ * Load the block at the given offset.
+ */
+ @Override
+ protected ByteBuffer getBlockAt(final int offset) throws IOException {
+ // The header block doesn't count, so add one
+ long blockWanted = offset + 1L;
+ long startAt = blockWanted * bigBlockSize.getBigBlockSize();
+ try {
+ return _data.read(bigBlockSize.getBigBlockSize(), startAt);
+ } catch (IndexOutOfBoundsException e) {
+ IndexOutOfBoundsException wrapped = new IndexOutOfBoundsException("Block " + offset + " not found");
+ wrapped.initCause(e);
+ throw wrapped;
+ }
+ }
+
+ /**
+ * Load the block at the given offset,
+ * extending the file if needed
+ */
+ @Override
+ protected ByteBuffer createBlockIfNeeded(final int offset) throws IOException {
+ try {
+ return getBlockAt(offset);
+ } catch(IndexOutOfBoundsException e) {
+ // The header block doesn't count, so add one
+ long startAt = (offset+1L) * bigBlockSize.getBigBlockSize();
+ // Allocate and write
+ ByteBuffer buffer = ByteBuffer.allocate(getBigBlockSize());
+ _data.write(buffer, startAt);
+ // Retrieve the properly backed block
+ return getBlockAt(offset);
+ }
+ }
+
+ /**
+ * Returns the BATBlock that handles the specified offset,
+ * and the relative index within it
+ */
+ @Override
+ protected BATBlockAndIndex getBATBlockAndIndex(final int offset) {
+ return BATBlock.getBATBlockAndIndex(
+ offset, _header, _bat_blocks
+ );
+ }
+
+ /**
+ * Works out what block follows the specified one.
+ */
+ @Override
+ protected int getNextBlock(final int offset) {
+ BATBlockAndIndex bai = getBATBlockAndIndex(offset);
+ return bai.getBlock().getValueAt( bai.getIndex() );
+ }
+
+ /**
+ * Changes the record of what block follows the specified one.
+ */
+ @Override
+ protected void setNextBlock(final int offset, final int nextBlock) {
+ BATBlockAndIndex bai = getBATBlockAndIndex(offset);
+ bai.getBlock().setValueAt(
+ bai.getIndex(), nextBlock
+ );
+ }
+
+ /**
+ * Finds a free block, and returns its offset.
+ * This method will extend the file if needed, and if doing
+ * so, allocate new FAT blocks to address the extra space.
+ */
+ @Override
+ protected int getFreeBlock() throws IOException {
+ int numSectors = bigBlockSize.getBATEntriesPerBlock();
+
+ // First up, do we have any spare ones?
+ int offset = 0;
+ for (BATBlock bat : _bat_blocks) {
+ if(bat.hasFreeSectors()) {
+ // Claim one of them and return it
+ for(int j=0; j<numSectors; j++) {
+ int batValue = bat.getValueAt(j);
+ if(batValue == POIFSConstants.UNUSED_BLOCK) {
+ // Bingo
+ return offset + j;
+ }
+ }
+ }
+
+ // Move onto the next BAT
+ offset += numSectors;
+ }
+
+ // If we get here, then there aren't any free sectors
+ // in any of the BATs, so we need another BAT
+ BATBlock bat = createBAT(offset, true);
+ bat.setValueAt(0, POIFSConstants.FAT_SECTOR_BLOCK);
+ _bat_blocks.add(bat);
+
+ // Now store a reference to the BAT in the required place
+ if(_header.getBATCount() >= 109) {
+ // Needs to come from an XBAT
+ BATBlock xbat = null;
+ for(BATBlock x : _xbat_blocks) {
+ if(x.hasFreeSectors()) {
+ xbat = x;
+ break;
+ }
+ }
+ if(xbat == null) {
+ // Oh joy, we need a new XBAT too...
+ xbat = createBAT(offset+1, false);
+ // Allocate our new BAT as the first block in the XBAT
+ xbat.setValueAt(0, offset);
+ // And allocate the XBAT in the BAT
+ bat.setValueAt(1, POIFSConstants.DIFAT_SECTOR_BLOCK);
+
+ // Will go one place higher as XBAT added in
+ offset++;
+
+ // Chain it
+ if(_xbat_blocks.size() == 0) {
+ _header.setXBATStart(offset);
+ } else {
+ _xbat_blocks.get(_xbat_blocks.size()-1).setValueAt(
+ bigBlockSize.getXBATEntriesPerBlock(), offset
+ );
+ }
+ _xbat_blocks.add(xbat);
+ _header.setXBATCount(_xbat_blocks.size());
+ } else {
+ // Allocate our BAT in the existing XBAT with space
+ for(int i=0; i<bigBlockSize.getXBATEntriesPerBlock(); i++) {
+ if(xbat.getValueAt(i) == POIFSConstants.UNUSED_BLOCK) {
+ xbat.setValueAt(i, offset);
+ break;
+ }
+ }
+ }
+ } else {
+ // Store us in the header
+ int[] newBATs = new int[_header.getBATCount()+1];
+ System.arraycopy(_header.getBATArray(), 0, newBATs, 0, newBATs.length-1);
+ newBATs[newBATs.length-1] = offset;
+ _header.setBATArray(newBATs);
+ }
+ _header.setBATCount(_bat_blocks.size());
+
+ // The current offset stores us, but the next one is free
+ return offset+1;
+ }
+
+ protected long size() throws IOException {
+ return _data.size();
+ }
+
+ @Override
+ protected ChainLoopDetector getChainLoopDetector() throws IOException {
+ return new ChainLoopDetector(_data.size());
}
+ /**
+ * For unit testing only! Returns the underlying
+ * properties table
+ */
+ PropertyTable _get_property_table() {
+ return _property_table;
+ }
+
/**
- * <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
- * creating from an <tt>InputStream</tt>.</p>
- *
- * <p>Note that with this constructor, you will need to call {@link #close()}
- * when you're done to have the underlying file closed, as the file is
- * kept open during normal operation to read the data out.</p>
- * @param readOnly whether the POIFileSystem will only be used in read-only mode
- *
- * @param file the File from which to read the data
+ * Returns the MiniStore, which performs a similar low
+ * level function to this, except for the small blocks.
+ */
+ POIFSMiniStore getMiniStore() {
+ return _mini_store;
+ }
+
+ /**
+     * add a new POIFSDocument to the FileSystem
*
- * @exception IOException on errors reading, or on invalid data
+ * @param document the POIFSDocument being added
+ */
+ void addDocument(final POIFSDocument document)
+ {
+ _property_table.addProperty(document.getDocumentProperty());
+ }
+
+ /**
+ * add a new DirectoryProperty to the FileSystem
+ *
+ * @param directory the DirectoryProperty being added
*/
- public POIFSFileSystem(File file, boolean readOnly) throws IOException {
- super(file, readOnly);
+ void addDirectory(final DirectoryProperty directory)
+ {
+ _property_table.addProperty(directory);
+ }
+
+ /**
+ * Create a new document to be added to the root directory
+ *
+ * @param stream the InputStream from which the document's data
+ * will be obtained
+ * @param name the name of the new POIFSDocument
+ *
+ * @return the new DocumentEntry
+ *
+ * @exception IOException on error creating the new POIFSDocument
+ */
+
+ public DocumentEntry createDocument(final InputStream stream,
+ final String name)
+ throws IOException
+ {
+ return getRoot().createDocument(name, stream);
+ }
+
+ /**
+ * create a new DocumentEntry in the root entry; the data will be
+ * provided later
+ *
+ * @param name the name of the new DocumentEntry
+ * @param size the size of the new DocumentEntry
+ * @param writer the writer of the new DocumentEntry
+ *
+ * @return the new DocumentEntry
+ *
+ * @exception IOException if the writer exceeds the given size
+ */
+ public DocumentEntry createDocument(final String name, final int size, final POIFSWriterListener writer)
+ throws IOException {
+ return getRoot().createDocument(name, size, writer);
+ }
+
+ /**
+ * create a new DirectoryEntry in the root directory
+ *
+ * @param name the name of the new DirectoryEntry
+ *
+ * @return the new DirectoryEntry
+ *
+ * @exception IOException on name duplication
+ */
+
+ public DirectoryEntry createDirectory(final String name)
+ throws IOException
+ {
+ return getRoot().createDirectory(name);
}
/**
- * <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
- * creating from an <tt>InputStream</tt>. The File will be opened read-only</p>
- *
- * <p>Note that with this constructor, you will need to call {@link #close()}
- * when you're done to have the underlying file closed, as the file is
- * kept open during normal operation to read the data out.</p>
- *
- * @param file the File from which to read the data
+ * Set the contents of a document in the root directory,
+ * creating if needed, otherwise updating
*
- * @exception IOException on errors reading, or on invalid data
+ * @param stream the InputStream from which the document's data
+ * will be obtained
+ * @param name the name of the new or existing POIFSDocument
+ *
+ * @return the new or updated DocumentEntry
+ *
+ * @exception IOException on error populating the POIFSDocument
*/
- public POIFSFileSystem(File file) throws IOException {
- super(file);
+ @SuppressWarnings("UnusedReturnValue")
+ public DocumentEntry createOrUpdateDocument(final InputStream stream, final String name)
+ throws IOException {
+ return getRoot().createOrUpdateDocument(name, stream);
}
+ /**
+ * Does the filesystem support an in-place write via
+ * {@link #writeFilesystem()} ? If false, only writing out to
+ * a brand new file via {@link #writeFilesystem(OutputStream)}
+ * is supported.
+ */
+ public boolean isInPlaceWriteable() {
+ return (_data instanceof FileBackedDataSource) && ((FileBackedDataSource) _data).isWriteable();
+ }
+
+ /**
+     * Write the filesystem out to the open file. Will throw an
+ * {@link IllegalArgumentException} if opened from an
+ * {@link InputStream}.
+ *
+ * @exception IOException thrown on errors writing to the stream
+ */
+ public void writeFilesystem() throws IOException {
+ if (!(_data instanceof FileBackedDataSource)) {
+ throw new IllegalArgumentException(
+ "POIFS opened from an inputstream, so writeFilesystem() may " +
+ "not be called. Use writeFilesystem(OutputStream) instead"
+ );
+ }
+ if (! ((FileBackedDataSource)_data).isWriteable()) {
+ throw new IllegalArgumentException(
+ "POIFS opened in read only mode, so writeFilesystem() may " +
+ "not be called. Open the FileSystem in read-write mode first"
+ );
+ }
+ syncWithDataSource();
+ }
+
+ /**
+ * Write the filesystem out
+ *
+ * @param stream the OutputStream to which the filesystem will be
+ * written
+ *
+ * @exception IOException thrown on errors writing to the stream
+ */
+ public void writeFilesystem(final OutputStream stream) throws IOException {
+ // Have the datasource updated
+ syncWithDataSource();
+
+ // Now copy the contents to the stream
+ _data.copyTo(stream);
+ }
+
+ /**
+ * Has our in-memory objects write their state
+ * to their backing blocks
+ */
+ private void syncWithDataSource() throws IOException {
+ // Mini Stream + SBATs first, as mini-stream details have
+ // to be stored in the Root Property
+ _mini_store.syncWithDataSource();
+
+ // Properties
+ POIFSStream propStream = new POIFSStream(this, _header.getPropertyStart());
+ _property_table.preWrite();
+ _property_table.write(propStream);
+ // _header.setPropertyStart has been updated on write ...
+
+ // HeaderBlock
+ ByteArrayOutputStream baos = new ByteArrayOutputStream(
+ _header.getBigBlockSize().getBigBlockSize()
+ );
+ _header.writeData(baos);
+ getBlockAt(-1).put(baos.toByteArray());
+
+
+ // BATs
+ for(BATBlock bat : _bat_blocks) {
+ ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
+ bat.writeData(block);
+ }
+ // XBats
+ for(BATBlock bat : _xbat_blocks) {
+ ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
+ bat.writeData(block);
+ }
+ }
+
+ /**
+ * Closes the FileSystem, freeing any underlying files, streams
+ * and buffers. After this, you will be unable to read or
+ * write from the FileSystem.
+ */
+ public void close() throws IOException {
+ _data.close();
+ }
+
+ /**
+ * read in a file and write it back out again
+ *
+ * @param args names of the files; arg[ 0 ] is the input file,
+ * arg[ 1 ] is the output file
+ */
+ public static void main(String args[]) throws IOException {
+ if (args.length != 2) {
+ System.err.println(
+ "two arguments required: input filename and output filename");
+ System.exit(1);
+ }
+
+ try (FileInputStream istream = new FileInputStream(args[0])) {
+ try (FileOutputStream ostream = new FileOutputStream(args[1])) {
+ try (POIFSFileSystem fs = new POIFSFileSystem(istream)) {
+ fs.writeFilesystem(ostream);
+ }
+ }
+ }
+ }
+
+ /**
+ * Get the root entry
+ *
+ * @return the root entry
+ */
+ public DirectoryNode getRoot() {
+ if (_root == null) {
+ _root = new DirectoryNode(_property_table.getRoot(), this, null);
+ }
+ return _root;
+ }
+
+ /**
+ * open a document in the root entry's list of entries
+ *
+ * @param documentName the name of the document to be opened
+ *
+ * @return a newly opened DocumentInputStream
+ *
+ * @exception IOException if the document does not exist or the
+ * name is that of a DirectoryEntry
+ */
+ public DocumentInputStream createDocumentInputStream(
+ final String documentName) throws IOException {
+ return getRoot().createDocumentInputStream(documentName);
+ }
+
+ /**
+ * remove an entry
+ *
+ * @param entry to be removed
+ */
+ void remove(EntryNode entry) throws IOException {
+ // If it's a document, free the blocks
+ if (entry instanceof DocumentEntry) {
+ POIFSDocument doc = new POIFSDocument((DocumentProperty)entry.getProperty(), this);
+ doc.free();
+ }
+
+ // Now zap it from the properties list
+ _property_table.removeProperty(entry.getProperty());
+ }
+
+    /* ********** START implementation of POIFSViewable ********** */
+
+ /**
+ * Get an array of objects, some of which may implement
+ * POIFSViewable
+ *
+ * @return an array of Object; may not be null, but may be empty
+ */
+ public Object [] getViewableArray() {
+ if (preferArray()) {
+ return getRoot().getViewableArray();
+ }
+
+ return new Object[ 0 ];
+ }
+
+ /**
+ * Get an Iterator of objects, some of which may implement
+ * POIFSViewable
+ *
+ * @return an Iterator; may not be null, but may have an empty
+ * back end store
+ */
+
+ public Iterator<Object> getViewableIterator() {
+ if (!preferArray()) {
+ return getRoot().getViewableIterator();
+ }
+
+ return Collections.emptyList().iterator();
+ }
+
+ /**
+ * Give viewers a hint as to whether to call getViewableArray or
+ * getViewableIterator
+ *
+ * @return true if a viewer should call getViewableArray, false if
+ * a viewer should call getViewableIterator
+ */
+
+ public boolean preferArray() {
+ return getRoot().preferArray();
+ }
+
+ /**
+ * Provides a short description of the object, to be used when a
+ * POIFSViewable object has not provided its contents.
+ *
+ * @return short description
+ */
+
+ public String getShortDescription() {
+ return "POIFS FileSystem";
+ }
+
+    /* ********** END implementation of POIFSViewable ********** */
+
+ /**
+ * @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
+ */
+ public int getBigBlockSize() {
+ return bigBlockSize.getBigBlockSize();
+ }
+
+ /**
+ * @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
+ */
+ @SuppressWarnings("WeakerAccess")
+ public POIFSBigBlockSize getBigBlockSizeDetails() {
+ return bigBlockSize;
+ }
+
/**
* Creates a new {@link POIFSFileSystem} in a new {@link File}.
* Use {@link #POIFSFileSystem(File)} to open an existing File;
* this should only be used to create a new, empty filesystem.
*/
public static POIFSFileSystem create(File file) throws IOException {
// Create a new empty POIFS in the file
- try (POIFSFileSystem tmp = new POIFSFileSystem()) {
- try (OutputStream out = new FileOutputStream(file)) {
- tmp.writeFilesystem(out);
- }
+ try (POIFSFileSystem tmp = new POIFSFileSystem();
+ OutputStream out = new FileOutputStream(file)) {
+ tmp.writeFilesystem(out);
}
-
+
// Open it up again backed by the file
return new POIFSFileSystem(file, false);
}
- /**
- * read in a file and write it back out again
- *
- * @param args names of the files; arg[ 0 ] is the input file,
- * arg[ 1 ] is the output file
- */
- public static void main(String args[]) throws IOException {
- NPOIFSFileSystem.main(args);
+ @Override
+ protected int getBlockStoreBlockSize() {
+ return getBigBlockSize();
+ }
+
+ @Internal
+ public PropertyTable getPropertyTable() {
+ return _property_table;
}
+
+ @Internal
+ public HeaderBlock getHeaderBlock() {
+ return _header;
+ }
+
+
+ private static void sanityCheckBlockCount(int block_count) throws IOException {
+ if (block_count <= 0) {
+ throw new IOException(
+ "Illegal block count; minimum count is 1, got " +
+ block_count + " instead"
+ );
+ }
+ if (block_count > MAX_BLOCK_COUNT) {
+ throw new IOException(
+ "Block count " + block_count +
+ " is too high. POI maximum is " + MAX_BLOCK_COUNT + "."
+ );
+ }
+ }
+
}
+
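/*
 * Editor's note: a second usage sketch, not part of the patch, covering the File-backed,
 * in-place write path added above. POIFSFileSystem.create() builds a new empty filesystem
 * on disk and reopens it read-write, so writeFilesystem() can be called without an
 * OutputStream. The file name and entry name below are illustrative only.
 */
import java.io.ByteArrayInputStream;
import java.io.File;
import java.nio.charset.StandardCharsets;

import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class InPlaceWriteSketch {
    public static void main(String[] args) throws Exception {
        File target = new File("inplace.ole2");

        // Create a brand new, empty filesystem backed by the file
        try (POIFSFileSystem fs = POIFSFileSystem.create(target)) {
            // Opened from a File in read-write mode, so in-place writes are allowed
            if (!fs.isInPlaceWriteable()) {
                throw new IllegalStateException("expected a writeable, file-backed POIFS");
            }

            // Add or replace an entry in the root directory ...
            byte[] data = "updated contents".getBytes(StandardCharsets.UTF_8);
            fs.createOrUpdateDocument(new ByteArrayInputStream(data), "Entry");

            // ... and sync the changes straight back into the same file
            fs.writeFilesystem();
        }
    }
}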
--- /dev/null
+
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+
+package org.apache.poi.poifs.filesystem;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.poi.poifs.common.POIFSConstants;
+import org.apache.poi.poifs.property.RootProperty;
+import org.apache.poi.poifs.storage.BATBlock;
+import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
+import org.apache.poi.poifs.storage.HeaderBlock;
+
+/**
+ * This class handles the MiniStream (small block store)
+ * in the NIO case for {@link POIFSFileSystem}
+ */
+public class POIFSMiniStore extends BlockStore
+{
+ private POIFSFileSystem _filesystem;
+ private POIFSStream _mini_stream;
+ private List<BATBlock> _sbat_blocks;
+ private HeaderBlock _header;
+ private RootProperty _root;
+
+ POIFSMiniStore(POIFSFileSystem filesystem, RootProperty root,
+ List<BATBlock> sbats, HeaderBlock header)
+ {
+ this._filesystem = filesystem;
+ this._sbat_blocks = sbats;
+ this._header = header;
+ this._root = root;
+
+ this._mini_stream = new POIFSStream(filesystem, root.getStartBlock());
+ }
+
+ /**
+ * Load the block at the given offset.
+ */
+ protected ByteBuffer getBlockAt(final int offset) {
+ // Which big block is this?
+ int byteOffset = offset * POIFSConstants.SMALL_BLOCK_SIZE;
+ int bigBlockNumber = byteOffset / _filesystem.getBigBlockSize();
+ int bigBlockOffset = byteOffset % _filesystem.getBigBlockSize();
+
+ // Now locate the data block for it
+ Iterator<ByteBuffer> it = _mini_stream.getBlockIterator();
+ for(int i=0; i<bigBlockNumber; i++) {
+ it.next();
+ }
+ ByteBuffer dataBlock = it.next();
+ if(dataBlock == null) {
+ throw new IndexOutOfBoundsException("Big block " + bigBlockNumber + " outside stream");
+ }
+
+ // Position ourselves, and take a slice
+ dataBlock.position(
+ dataBlock.position() + bigBlockOffset
+ );
+ ByteBuffer miniBuffer = dataBlock.slice();
+ miniBuffer.limit(POIFSConstants.SMALL_BLOCK_SIZE);
+ return miniBuffer;
+ }
+
+ /**
+ * Load the block, extending the underlying stream if needed
+ */
+ protected ByteBuffer createBlockIfNeeded(final int offset) throws IOException {
+ boolean firstInStore = false;
+ if (_mini_stream.getStartBlock() == POIFSConstants.END_OF_CHAIN) {
+ firstInStore = true;
+ }
+
+ // Try to get it without extending the stream
+ if (! firstInStore) {
+ try {
+ return getBlockAt(offset);
+            } catch(IndexOutOfBoundsException e) {
+                // The mini stream doesn't cover this offset yet - fall through and extend it
+            }
+ }
+
+ // Need to extend the stream
+ // TODO Replace this with proper append support
+ // For now, do the extending by hand...
+
+ // Ask for another block
+ int newBigBlock = _filesystem.getFreeBlock();
+ _filesystem.createBlockIfNeeded(newBigBlock);
+
+ // If we are the first block to be allocated, initialise the stream
+ if (firstInStore) {
+ _filesystem._get_property_table().getRoot().setStartBlock(newBigBlock);
+ _mini_stream = new POIFSStream(_filesystem, newBigBlock);
+ } else {
+ // Tack it onto the end of our chain
+ ChainLoopDetector loopDetector = _filesystem.getChainLoopDetector();
+ int block = _mini_stream.getStartBlock();
+ while(true) {
+ loopDetector.claim(block);
+ int next = _filesystem.getNextBlock(block);
+ if(next == POIFSConstants.END_OF_CHAIN) {
+ break;
+ }
+ block = next;
+ }
+ _filesystem.setNextBlock(block, newBigBlock);
+ }
+
+ // This is now the new end
+ _filesystem.setNextBlock(newBigBlock, POIFSConstants.END_OF_CHAIN);
+
+ // Now try again, to get the real small block
+ return createBlockIfNeeded(offset);
+ }
+
+ /**
+ * Returns the BATBlock that handles the specified offset,
+ * and the relative index within it
+ */
+ protected BATBlockAndIndex getBATBlockAndIndex(final int offset) {
+ return BATBlock.getSBATBlockAndIndex(
+ offset, _header, _sbat_blocks
+ );
+ }
+
+ /**
+ * Works out what block follows the specified one.
+ */
+ protected int getNextBlock(final int offset) {
+ BATBlockAndIndex bai = getBATBlockAndIndex(offset);
+ return bai.getBlock().getValueAt( bai.getIndex() );
+ }
+
+ /**
+ * Changes the record of what block follows the specified one.
+ */
+ protected void setNextBlock(final int offset, final int nextBlock) {
+ BATBlockAndIndex bai = getBATBlockAndIndex(offset);
+ bai.getBlock().setValueAt(
+ bai.getIndex(), nextBlock
+ );
+ }
+
+ /**
+ * Finds a free block, and returns its offset.
+ * This method will extend the file if needed, and if doing
+ * so, allocate new FAT blocks to address the extra space.
+ */
+ protected int getFreeBlock() throws IOException {
+ int sectorsPerSBAT = _filesystem.getBigBlockSizeDetails().getBATEntriesPerBlock();
+
+ // First up, do we have any spare ones?
+ int offset = 0;
+ for (BATBlock sbat : _sbat_blocks) {
+ // Check this one
+ if (sbat.hasFreeSectors()) {
+ // Claim one of them and return it
+ for (int j = 0; j < sectorsPerSBAT; j++) {
+ int sbatValue = sbat.getValueAt(j);
+ if (sbatValue == POIFSConstants.UNUSED_BLOCK) {
+ // Bingo
+ return offset + j;
+ }
+ }
+ }
+
+ // Move onto the next SBAT
+ offset += sectorsPerSBAT;
+ }
+
+ // If we get here, then there aren't any
+ // free sectors in any of the SBATs
+ // So, we need to extend the chain and add another
+
+ // Create a new BATBlock
+ BATBlock newSBAT = BATBlock.createEmptyBATBlock(_filesystem.getBigBlockSizeDetails(), false);
+ int batForSBAT = _filesystem.getFreeBlock();
+ newSBAT.setOurBlockIndex(batForSBAT);
+
+ // Are we the first SBAT?
+ if(_header.getSBATCount() == 0) {
+ // Tell the header that we've got our first SBAT there
+ _header.setSBATStart(batForSBAT);
+ _header.setSBATBlockCount(1);
+ } else {
+ // Find the end of the SBAT stream, and add the sbat in there
+ ChainLoopDetector loopDetector = _filesystem.getChainLoopDetector();
+ int batOffset = _header.getSBATStart();
+ while(true) {
+ loopDetector.claim(batOffset);
+ int nextBat = _filesystem.getNextBlock(batOffset);
+ if(nextBat == POIFSConstants.END_OF_CHAIN) {
+ break;
+ }
+ batOffset = nextBat;
+ }
+
+ // Add it in at the end
+ _filesystem.setNextBlock(batOffset, batForSBAT);
+
+ // And update the count
+ _header.setSBATBlockCount(
+ _header.getSBATCount() + 1
+ );
+ }
+
+ // Finish allocating
+ _filesystem.setNextBlock(batForSBAT, POIFSConstants.END_OF_CHAIN);
+ _sbat_blocks.add(newSBAT);
+
+ // Return our first spot
+ return offset;
+ }
+
+ @Override
+ protected ChainLoopDetector getChainLoopDetector() {
+ return new ChainLoopDetector( _root.getSize() );
+ }
+
+ protected int getBlockStoreBlockSize() {
+ return POIFSConstants.SMALL_BLOCK_SIZE;
+ }
+
+ /**
+ * Writes the SBATs to their backing blocks, and updates
+ * the mini-stream size in the properties. Stream size is
+ * based on full blocks used, not the data within the streams
+ */
+ void syncWithDataSource() throws IOException {
+ int blocksUsed = 0;
+ for (BATBlock sbat : _sbat_blocks) {
+ ByteBuffer block = _filesystem.getBlockAt(sbat.getOurBlockIndex());
+ sbat.writeData(block);
+
+ if (!sbat.hasFreeSectors()) {
+ blocksUsed += _filesystem.getBigBlockSizeDetails().getBATEntriesPerBlock();
+ } else {
+ blocksUsed += sbat.getUsedSectors(false);
+ }
+ }
+ // Set the size on the root in terms of the number of SBAT blocks
+ // RootProperty.setSize does the sbat -> bytes conversion for us
+ _filesystem._get_property_table().getRoot().setSize(blocksUsed);
+ }
+}
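The getBlockAt() logic above boils down to fixed arithmetic: each mini block is SMALL_BLOCK_SIZE bytes, so an offset into the mini stream splits into a big-block number within the chain and a byte offset inside that big block. A standalone sketch of that mapping, assuming 64-byte mini blocks and 512-byte big blocks:

    public class MiniOffsetDemo {
        public static void main(String[] args) {
            // Assumed sizes: 64-byte mini blocks (POIFSConstants.SMALL_BLOCK_SIZE)
            // inside 512-byte big blocks, so 8 mini blocks per big block
            int smallBlockSize = 64;
            int bigBlockSize = 512;

            int miniOffset = 19;                              // the 20th mini block in the mini stream
            int byteOffset = miniOffset * smallBlockSize;     // 1216 bytes into the mini stream
            int bigBlockNumber = byteOffset / bigBlockSize;   // 2 -> third big block in the chain
            int bigBlockOffset = byteOffset % bigBlockSize;   // 192 bytes into that big block

            System.out.println(bigBlockNumber + " / " + bigBlockOffset);
        }
    }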
--- /dev/null
+
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+
+package org.apache.poi.poifs.filesystem;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+
+import org.apache.poi.poifs.common.POIFSConstants;
+import org.apache.poi.poifs.filesystem.BlockStore.ChainLoopDetector;
+import org.apache.poi.poifs.property.Property;
+import org.apache.poi.poifs.storage.HeaderBlock;
+
+/**
+ * This handles reading and writing a stream within a
+ * {@link POIFSFileSystem}. It can supply an iterator
+ * to read blocks, and a way to write out to existing and
+ * new blocks.
+ * Most users will want a higher level version of this,
+ * which deals with properties to track which stream
+ * this is.
+ * This only works on big block streams; it doesn't
+ * handle small block ones.
+ * This uses the new NIO code.
+ *
+ * TODO Implement a streaming write method, and append
+ */
+
+public class POIFSStream implements Iterable<ByteBuffer>
+{
+ private BlockStore blockStore;
+ private int startBlock;
+ private OutputStream outStream;
+
+ /**
+ * Constructor for an existing stream. It's up to you
+ * to know how to get the start block (eg from a
+ * {@link HeaderBlock} or a {@link Property})
+ */
+ public POIFSStream(BlockStore blockStore, int startBlock) {
+ this.blockStore = blockStore;
+ this.startBlock = startBlock;
+ }
+
+ /**
+ * Constructor for a new stream. A start block won't
+ * be allocated until you begin writing to it.
+ */
+ public POIFSStream(BlockStore blockStore) {
+ this.blockStore = blockStore;
+ this.startBlock = POIFSConstants.END_OF_CHAIN;
+ }
+
+ /**
+ * What block does this stream start at?
+ * Will be {@link POIFSConstants#END_OF_CHAIN} for a
+ * new stream that hasn't been written to yet.
+ */
+ public int getStartBlock() {
+ return startBlock;
+ }
+
+ /**
+ * Returns an iterator that'll supply one {@link ByteBuffer}
+ * per block in the stream.
+ */
+ public Iterator<ByteBuffer> iterator() {
+ return getBlockIterator();
+ }
+
+ Iterator<ByteBuffer> getBlockIterator() {
+ if(startBlock == POIFSConstants.END_OF_CHAIN) {
+ throw new IllegalStateException(
+ "Can't read from a new stream before it has been written to"
+ );
+ }
+ return new StreamBlockByteBufferIterator(startBlock);
+ }
+
+ /**
+ * Updates the contents of the stream to the new
+ * set of bytes.
+ * Note - if this is property based, you'll still
+ * need to update the size in the property yourself
+ */
+ void updateContents(byte[] contents) throws IOException {
+ OutputStream os = getOutputStream();
+ os.write(contents);
+ os.close();
+ }
+
+ public OutputStream getOutputStream() throws IOException {
+ if (outStream == null) {
+ outStream = new StreamBlockByteBuffer();
+ }
+ return outStream;
+ }
+
+ // TODO Streaming write support
+ // TODO then convert fixed sized write to use streaming internally
+ // TODO Append write support (probably streaming)
+
+ /**
+ * Frees all blocks in the stream
+ */
+ public void free() throws IOException {
+ ChainLoopDetector loopDetector = blockStore.getChainLoopDetector();
+ free(loopDetector);
+ }
+ private void free(ChainLoopDetector loopDetector) {
+ int nextBlock = startBlock;
+ while(nextBlock != POIFSConstants.END_OF_CHAIN) {
+ int thisBlock = nextBlock;
+ loopDetector.claim(thisBlock);
+ nextBlock = blockStore.getNextBlock(thisBlock);
+ blockStore.setNextBlock(thisBlock, POIFSConstants.UNUSED_BLOCK);
+ }
+ this.startBlock = POIFSConstants.END_OF_CHAIN;
+ }
+
+ /**
+ * Class that handles a streaming read of one stream
+ */
+ protected class StreamBlockByteBufferIterator implements Iterator<ByteBuffer> {
+ private ChainLoopDetector loopDetector;
+ private int nextBlock;
+
+ StreamBlockByteBufferIterator(int firstBlock) {
+ this.nextBlock = firstBlock;
+ try {
+ this.loopDetector = blockStore.getChainLoopDetector();
+ } catch(IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public boolean hasNext() {
+ return nextBlock != POIFSConstants.END_OF_CHAIN;
+ }
+
+ public ByteBuffer next() {
+ if(nextBlock == POIFSConstants.END_OF_CHAIN) {
+ throw new IndexOutOfBoundsException("Can't read past the end of the stream");
+ }
+
+ try {
+ loopDetector.claim(nextBlock);
+ ByteBuffer data = blockStore.getBlockAt(nextBlock);
+ nextBlock = blockStore.getNextBlock(nextBlock);
+ return data;
+ } catch(IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+ }
+
+ protected class StreamBlockByteBuffer extends OutputStream {
+ byte oneByte[] = new byte[1];
+ ByteBuffer buffer;
+ // Make sure we don't encounter a loop whilst overwriting
+ // the existing blocks
+ ChainLoopDetector loopDetector;
+ int prevBlock, nextBlock;
+
+ StreamBlockByteBuffer() throws IOException {
+ loopDetector = blockStore.getChainLoopDetector();
+ prevBlock = POIFSConstants.END_OF_CHAIN;
+ nextBlock = startBlock;
+ }
+
+ void createBlockIfNeeded() throws IOException {
+ if (buffer != null && buffer.hasRemaining()) return;
+
+ int thisBlock = nextBlock;
+
+ // Allocate a block if needed, otherwise figure
+ // out what the next block will be
+ if(thisBlock == POIFSConstants.END_OF_CHAIN) {
+ thisBlock = blockStore.getFreeBlock();
+ loopDetector.claim(thisBlock);
+
+ // We're on the end of the chain
+ nextBlock = POIFSConstants.END_OF_CHAIN;
+
+ // Mark the previous block as carrying on to us if needed
+ if(prevBlock != POIFSConstants.END_OF_CHAIN) {
+ blockStore.setNextBlock(prevBlock, thisBlock);
+ }
+ blockStore.setNextBlock(thisBlock, POIFSConstants.END_OF_CHAIN);
+
+ // If we've just written the first block on a
+ // new stream, save the start block offset
+ if(startBlock == POIFSConstants.END_OF_CHAIN) {
+ startBlock = thisBlock;
+ }
+ } else {
+ loopDetector.claim(thisBlock);
+ nextBlock = blockStore.getNextBlock(thisBlock);
+ }
+
+ buffer = blockStore.createBlockIfNeeded(thisBlock);
+
+ // Update pointers
+ prevBlock = thisBlock;
+ }
+
+ @Override
+ public void write(int b) throws IOException {
+ oneByte[0] = (byte)(b & 0xFF);
+ write(oneByte);
+ }
+
+ @Override
+ public void write(byte[] b, int off, int len) throws IOException {
+ if ((off < 0) || (off > b.length) || (len < 0) ||
+ ((off + len) > b.length) || ((off + len) < 0)) {
+ throw new IndexOutOfBoundsException();
+ } else if (len == 0) {
+ return;
+ }
+
+ do {
+ createBlockIfNeeded();
+ int writeBytes = Math.min(buffer.remaining(), len);
+ buffer.put(b, off, writeBytes);
+ off += writeBytes;
+ len -= writeBytes;
+ } while (len > 0);
+ }
+
+ public void close() throws IOException {
+ // If we're overwriting, free any remaining blocks
+ POIFSStream toFree = new POIFSStream(blockStore, nextBlock);
+ toFree.free(loopDetector);
+
+ // Mark the end of the stream, if we have any data
+ if (prevBlock != POIFSConstants.END_OF_CHAIN) {
+ blockStore.setNextBlock(prevBlock, POIFSConstants.END_OF_CHAIN);
+ }
+ }
+ }
+}
+
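A minimal sketch of driving POIFSStream directly, assuming an already-open, writable POIFSFileSystem. As the class javadoc warns, writing at this level allocates and chains blocks but does not update any property's recorded size, so most callers should prefer the higher-level document APIs.

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.ByteBuffer;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;
    import org.apache.poi.poifs.filesystem.POIFSStream;

    public class StreamDemo {
        // fs is assumed to be an open, writable POIFSFileSystem
        static int roundTrip(POIFSFileSystem fs) throws IOException {
            POIFSStream stream = new POIFSStream(fs);           // new stream, no start block yet
            try (OutputStream out = stream.getOutputStream()) {
                out.write(new byte[] { 1, 2, 3, 4 });           // blocks are allocated as data is written
            }
            int start = stream.getStartBlock();                 // now a real block index, not END_OF_CHAIN

            // Read back one ByteBuffer per big block in the chain; whole blocks are
            // returned, so the total is a multiple of the big block size, not 4
            int total = 0;
            for (ByteBuffer block : new POIFSStream(fs, start)) {
                total += block.remaining();
            }
            return total;
        }
    }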
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.FileMagic;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.poifs.macros.Module.ModuleType;
import org.apache.poi.util.CodePageUtil;
protected static final String VBA_PROJECT_OOXML = "vbaProject.bin";
protected static final String VBA_PROJECT_POIFS = "VBA";
- private NPOIFSFileSystem fs;
+ private POIFSFileSystem fs;
public VBAMacroReader(InputStream rstream) throws IOException {
InputStream is = FileMagic.prepareToCheckMagic(rstream);
FileMagic fm = FileMagic.valueOf(is);
if (fm == FileMagic.OLE2) {
- fs = new NPOIFSFileSystem(is);
+ fs = new POIFSFileSystem(is);
} else {
openOOXML(is);
}
public VBAMacroReader(File file) throws IOException {
try {
- this.fs = new NPOIFSFileSystem(file);
+ this.fs = new POIFSFileSystem(file);
} catch (OfficeXmlFileException e) {
openOOXML(new FileInputStream(file));
}
}
- public VBAMacroReader(NPOIFSFileSystem fs) {
+ public VBAMacroReader(POIFSFileSystem fs) {
this.fs = fs;
}
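A short usage sketch for the constructors above, reading VBA module sources out of a macro-enabled file. The input file name is hypothetical, and the readMacros() accessor returning module names mapped to their source code is assumed from the existing VBAMacroReader API.

    import java.io.File;
    import java.io.IOException;
    import java.util.Map;
    import org.apache.poi.poifs.macros.VBAMacroReader;

    public class MacroDump {
        public static void main(String[] args) throws IOException {
            // Hypothetical input; any OLE2 or OOXML document with a VBA project should work
            try (VBAMacroReader reader = new VBAMacroReader(new File("macro-enabled.xls"))) {
                Map<String, String> modules = reader.readMacros();   // module name -> VBA source
                modules.forEach((name, code) ->
                        System.out.println(name + ": " + code.length() + " chars"));
            }
        }
    }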
if (endsWithIgnoreCase(zipEntry.getName(), VBA_PROJECT_OOXML)) {
try {
// Make a NPOIFS from the contents, and close the stream
- this.fs = new NPOIFSFileSystem(zis);
+ this.fs = new POIFSFileSystem(zis);
return;
} catch (IOException e) {
// Tidy up
package org.apache.poi.poifs.property;
-import org.apache.poi.poifs.filesystem.NPOIFSDocument;
+import org.apache.poi.poifs.filesystem.POIFSDocument;
/**
* Trivial extension of Property for POIFSDocuments
*/
public class DocumentProperty extends Property {
// the POIFSDocument this property is associated with
- private NPOIFSDocument _document;
+ private POIFSDocument _document;
/**
* Constructor
*
* @param doc the associated POIFSDocument
*/
- public void setDocument(NPOIFSDocument doc)
+ public void setDocument(POIFSDocument doc)
{
_document = doc;
}
*
* @return the associated document
*/
- public NPOIFSDocument getDocument()
+ public POIFSDocument getDocument()
{
return _document;
}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.property;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
-import org.apache.poi.poifs.filesystem.NPOIFSStream;
-import org.apache.poi.poifs.storage.HeaderBlock;
-import org.apache.poi.util.IOUtils;
-import org.apache.poi.util.POILogFactory;
-import org.apache.poi.util.POILogger;
-
-/**
- * This class embodies the Property Table for a {@link NPOIFSFileSystem};
- * this is basically the directory for all of the documents in the
- * filesystem.
- */
-public final class NPropertyTable extends PropertyTableBase {
- private static final POILogger _logger =
- POILogFactory.getLogger(NPropertyTable.class);
- //arbitrarily selected; may need to increase
- private static final int MAX_RECORD_LENGTH = 100_000;
-
- private POIFSBigBlockSize _bigBigBlockSize;
-
- public NPropertyTable(HeaderBlock headerBlock)
- {
- super(headerBlock);
- _bigBigBlockSize = headerBlock.getBigBlockSize();
- }
-
- /**
- * reading constructor (used when we've read in a file and we want
- * to extract the property table from it). Populates the
- * properties thoroughly
- *
- * @param headerBlock the header block of the file
- * @param filesystem the filesystem to read from
- *
- * @exception IOException if anything goes wrong (which should be
- * a result of the input being NFG)
- */
- public NPropertyTable(final HeaderBlock headerBlock,
- final NPOIFSFileSystem filesystem)
- throws IOException
- {
- super(
- headerBlock,
- buildProperties(
- (new NPOIFSStream(filesystem, headerBlock.getPropertyStart())).iterator(),
- headerBlock.getBigBlockSize()
- )
- );
- _bigBigBlockSize = headerBlock.getBigBlockSize();
- }
-
- private static List<Property> buildProperties(final Iterator<ByteBuffer> dataSource,
- final POIFSBigBlockSize bigBlockSize) throws IOException
- {
- List<Property> properties = new ArrayList<>();
- while(dataSource.hasNext()) {
- ByteBuffer bb = dataSource.next();
-
- // Turn it into an array
- byte[] data;
- if(bb.hasArray() && bb.arrayOffset() == 0 &&
- bb.array().length == bigBlockSize.getBigBlockSize()) {
- data = bb.array();
- } else {
- data = IOUtils.safelyAllocate(bigBlockSize.getBigBlockSize(), MAX_RECORD_LENGTH);
-
- int toRead = data.length;
- if (bb.remaining() < bigBlockSize.getBigBlockSize()) {
- // Looks to be a truncated block
- // This isn't allowed, but some third party created files
- // sometimes do this, and we can normally read anyway
- _logger.log(POILogger.WARN, "Short Property Block, ", bb.remaining(),
- " bytes instead of the expected " + bigBlockSize.getBigBlockSize());
- toRead = bb.remaining();
- }
-
- bb.get(data, 0, toRead);
- }
-
- PropertyFactory.convertToProperties(data, properties);
- }
- return properties;
- }
-
- /**
- * Return the number of BigBlock's this instance uses
- *
- * @return count of BigBlock instances
- */
- public int countBlocks()
- {
- long rawSize = _properties.size() * (long)POIFSConstants.PROPERTY_SIZE;
- int blkSize = _bigBigBlockSize.getBigBlockSize();
- int numBlocks = (int)(rawSize / blkSize);
- if ((rawSize % blkSize) != 0) {
- numBlocks++;
- }
- return numBlocks;
- }
-
- /**
- * Prepare to be written
- */
- public void preWrite() {
- List<Property> pList = new ArrayList<>();
- // give each property its index
- int i=0;
- for (Property p : _properties) {
- // only handle non-null properties
- if (p == null) continue;
- p.setIndex(i++);
- pList.add(p);
- }
-
- // prepare each property for writing
- for (Property p : pList) p.preWrite();
- }
-
- /**
- * Writes the properties out into the given low-level stream
- */
- public void write(NPOIFSStream stream) throws IOException {
- OutputStream os = stream.getOutputStream();
- for(Property property : _properties) {
- if(property != null) {
- property.writeData(os);
- }
- }
- os.close();
-
- // Update the start position if needed
- if(getStartBlock() != stream.getStartBlock()) {
- setStartBlock(stream.getStartBlock());
- }
- }
-}
package org.apache.poi.poifs.property;
-import java.io.IOException;
-
-import java.util.*;
+import java.util.List;
import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.storage.ListManagedBlock;
/**
* Factory for turning an array of RawDataBlock instances containing
* @author Marc Johnson (mjohnson at apache dot org)
*/
-class PropertyFactory {
+final class PropertyFactory {
// no need for an accessible constructor
private PropertyFactory()
{
}
- /**
- * Convert raw data blocks to an array of Property's
- *
- * @param blocks to be converted
- *
- * @return the converted List of Property objects. May contain
- * nulls, but will not be null
- *
- * @exception IOException if any of the blocks are empty
- */
- static List<Property> convertToProperties(ListManagedBlock [] blocks)
- throws IOException
- {
- List<Property> properties = new ArrayList<>();
-
- for (ListManagedBlock block : blocks) {
- byte[] data = block.getData();
- convertToProperties(data, properties);
- }
- return properties;
- }
-
- static void convertToProperties(byte[] data, List<Property> properties)
- throws IOException
- {
+ static void convertToProperties(byte[] data, List<Property> properties) {
int property_count = data.length / POIFSConstants.PROPERTY_SIZE;
int offset = 0;
import java.io.IOException;
import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Stack;
import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.storage.BlockWritable;
+import org.apache.poi.poifs.common.POIFSConstants;
+import org.apache.poi.poifs.filesystem.BATManaged;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSStream;
import org.apache.poi.poifs.storage.HeaderBlock;
-import org.apache.poi.poifs.storage.PropertyBlock;
-import org.apache.poi.poifs.storage.RawDataBlockList;
+import org.apache.poi.util.IOUtils;
+import org.apache.poi.util.POILogFactory;
+import org.apache.poi.util.POILogger;
/**
- * This class embodies the Property Table for the {@link org.apache.poi.poifs.filesystem.POIFSFileSystem};
- * this is basically the directory for all of the documents in the
- * filesystem.
- *
- * @author Marc Johnson (mjohnson at apache dot org)
+ * This class embodies the Property Table for a {@link POIFSFileSystem};
+ * this is basically the directory for all of the documents in the
+ * filesystem, mapping each entry in the filesystem to its
+ * chain of blocks.
*/
-public final class PropertyTable extends PropertyTableBase implements BlockWritable {
- private POIFSBigBlockSize _bigBigBlockSize;
- private BlockWritable[] _blocks;
+public final class PropertyTable implements BATManaged {
+ private static final POILogger _logger =
+ POILogFactory.getLogger(PropertyTable.class);
+
+ //arbitrarily selected; may need to increase
+ private static final int MAX_RECORD_LENGTH = 100_000;
+
+ private final HeaderBlock _header_block;
+ private final List<Property> _properties = new ArrayList<>();
+ private final POIFSBigBlockSize _bigBigBlockSize;
public PropertyTable(HeaderBlock headerBlock)
{
- super(headerBlock);
+ _header_block = headerBlock;
_bigBigBlockSize = headerBlock.getBigBlockSize();
- _blocks = null;
+ addProperty(new RootProperty());
}
/**
* properties thoroughly
*
* @param headerBlock the header block of the file
- * @param blockList the list of blocks
+ * @param filesystem the filesystem to read from
*
* @exception IOException if anything goes wrong (which should be
* a result of the input being NFG)
*/
- public PropertyTable(final HeaderBlock headerBlock,
- final RawDataBlockList blockList)
- throws IOException
- {
- super(
+ public PropertyTable(final HeaderBlock headerBlock, final POIFSFileSystem filesystem)
+ throws IOException {
+ this(
headerBlock,
- PropertyFactory.convertToProperties(
- blockList.fetchBlocks(headerBlock.getPropertyStart(), -1)
- )
+ new POIFSStream(filesystem, headerBlock.getPropertyStart())
);
+ }
+
+ /* only invoked locally and from the junit tests */
+ PropertyTable(final HeaderBlock headerBlock, final Iterable<ByteBuffer> dataSource)
+ throws IOException {
+ _header_block = headerBlock;
_bigBigBlockSize = headerBlock.getBigBlockSize();
- _blocks = null;
+
+ for (ByteBuffer bb : dataSource) {
+ // Turn it into an array
+ byte[] data;
+ if (bb.hasArray() && bb.arrayOffset() == 0 &&
+ bb.array().length == _bigBigBlockSize.getBigBlockSize()) {
+ data = bb.array();
+ } else {
+ data = IOUtils.safelyAllocate(_bigBigBlockSize.getBigBlockSize(), MAX_RECORD_LENGTH);
+
+ int toRead = data.length;
+ if (bb.remaining() < _bigBigBlockSize.getBigBlockSize()) {
+ // Looks to be a truncated block
+ // This isn't allowed, but some third party created files
+ // sometimes do this, and we can normally read anyway
+ _logger.log(POILogger.WARN, "Short Property Block, ", bb.remaining(),
+ " bytes instead of the expected " + _bigBigBlockSize.getBigBlockSize());
+ toRead = bb.remaining();
+ }
+
+ bb.get(data, 0, toRead);
+ }
+
+ PropertyFactory.convertToProperties(data, _properties);
+ }
+
+ populatePropertyTree( (DirectoryProperty)_properties.get(0));
}
+
/**
- * Prepare to be written
+ * Add a property to the list of properties we manage
+ *
+ * @param property the new Property to manage
*/
- public void preWrite()
- {
- Property[] properties = _properties.toArray(new Property[_properties.size()]);
+ public void addProperty(Property property) {
+ _properties.add(property);
+ }
- // give each property its index
- for (int k = 0; k < properties.length; k++)
- {
- properties[ k ].setIndex(k);
- }
+ /**
+ * Remove a property from the list of properties we manage
+ *
+ * @param property the Property to be removed
+ */
+ public void removeProperty(final Property property) {
+ _properties.remove(property);
+ }
- // allocate the blocks for the property table
- _blocks = PropertyBlock.createPropertyBlockArray(_bigBigBlockSize, _properties);
+ /**
+ * Get the root property
+ *
+ * @return the root property
+ */
+ public RootProperty getRoot() {
+ // it's always the first element in the List
+ return ( RootProperty ) _properties.get(0);
+ }
- // prepare each property for writing
- for (Property property : properties) {
- property.preWrite();
- }
+ /**
+ * Get the start block for the property table
+ *
+ * @return start block index
+ */
+ public int getStartBlock() {
+ return _header_block.getPropertyStart();
}
-
+
+ /**
+ * Set the start block for this instance
+ *
+ * @param index index into the array of BigBlock instances making
+     *        up the filesystem
+ */
+ public void setStartBlock(final int index) {
+ _header_block.setPropertyStart(index);
+ }
+
+
+
/**
* Return the number of BigBlock's this instance uses
*
* @return count of BigBlock instances
*/
- public int countBlocks()
- {
- return (_blocks == null) ? 0
- : _blocks.length;
+ public int countBlocks() {
+ long rawSize = _properties.size() * (long)POIFSConstants.PROPERTY_SIZE;
+ int blkSize = _bigBigBlockSize.getBigBlockSize();
+ int numBlocks = (int)(rawSize / blkSize);
+ if ((rawSize % blkSize) != 0) {
+ numBlocks++;
+ }
+ return numBlocks;
}
+
+ /**
+ * Prepare to be written
+ */
+ public void preWrite() {
+ List<Property> pList = new ArrayList<>();
+ // give each property its index
+ int i=0;
+ for (Property p : _properties) {
+ // only handle non-null properties
+ if (p == null) continue;
+ p.setIndex(i++);
+ pList.add(p);
+ }
+ // prepare each property for writing
+ for (Property p : pList) p.preWrite();
+ }
+
/**
- * Write the storage to an OutputStream
- *
- * @param stream the OutputStream to which the stored data should
- * be written
- *
- * @exception IOException on problems writing to the specified
- * stream
+ * Writes the properties out into the given low-level stream
*/
- public void writeBlocks(final OutputStream stream)
- throws IOException
- {
- if (_blocks != null)
- {
- for (BlockWritable _block : _blocks) {
- _block.writeBlocks(stream);
+ public void write(POIFSStream stream) throws IOException {
+ OutputStream os = stream.getOutputStream();
+ for(Property property : _properties) {
+ if(property != null) {
+ property.writeData(os);
+ }
+ }
+ os.close();
+
+ // Update the start position if needed
+ if(getStartBlock() != stream.getStartBlock()) {
+ setStartBlock(stream.getStartBlock());
+ }
+ }
+
+ private void populatePropertyTree(DirectoryProperty root) throws IOException {
+ int index = root.getChildIndex();
+
+ if (!Property.isValidIndex(index)) {
+ // property has no children
+ return;
+ }
+
+ final Stack<Property> children = new Stack<>();
+ children.push(_properties.get(index));
+ while (!children.empty()) {
+ Property property = children.pop();
+ if (property == null) {
+ // unknown / unsupported / corrupted property, skip
+ continue;
+ }
+
+ root.addChild(property);
+ if (property.isDirectory()) {
+ populatePropertyTree(( DirectoryProperty ) property);
+ }
+ index = property.getPreviousChildIndex();
+ if (isValidIndex(index)) {
+ children.push(_properties.get(index));
}
+ index = property.getNextChildIndex();
+ if (isValidIndex(index)) {
+ children.push(_properties.get(index));
+ }
+ }
+ }
+
+ private boolean isValidIndex(int index) {
+ if (! Property.isValidIndex(index))
+ return false;
+ if (index < 0 || index >= _properties.size()) {
+ _logger.log(POILogger.WARN, "Property index " + index +
+ "outside the valid range 0.."+_properties.size());
+ return false;
}
+ return true;
}
}
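countBlocks() above is a ceiling division: each property entry occupies POIFSConstants.PROPERTY_SIZE bytes, so a 512-byte big block holds four of them and any remainder costs one more block. A standalone sketch of the same calculation, assuming 128-byte entries and 512-byte blocks:

    public class PropertyBlockCount {
        public static void main(String[] args) {
            // Assumed sizes: 128-byte property entries in 512-byte big blocks
            int propertySize = 128;
            int bigBlockSize = 512;

            int propertyCount = 9;                                // e.g. root plus 8 entries
            long rawSize = propertyCount * (long) propertySize;   // 1152 bytes
            int blocks = (int) (rawSize / bigBlockSize);          // 2 full blocks...
            if (rawSize % bigBlockSize != 0) {
                blocks++;                                         // ...plus one partial block
            }
            System.out.println(blocks);                           // 3
        }
    }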
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.property;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Stack;
-
-import org.apache.poi.poifs.filesystem.BATManaged;
-import org.apache.poi.poifs.storage.HeaderBlock;
-import org.apache.poi.util.POILogFactory;
-import org.apache.poi.util.POILogger;
-
-/**
- * This class embodies the Property Table for the filesystem,
- * which looks up entries in the filesystem to their
- * chain of blocks.
- * This is the core support, there are implementations
- * for the different block schemes as needed.
- */
-public abstract class PropertyTableBase implements BATManaged {
- private static final POILogger _logger =
- POILogFactory.getLogger(PropertyTableBase.class);
-
- private final HeaderBlock _header_block;
- protected final List<Property> _properties;
-
- public PropertyTableBase(final HeaderBlock header_block)
- {
- _header_block = header_block;
- _properties = new ArrayList<>();
- addProperty(new RootProperty());
- }
-
- /**
- * Reading constructor (used when we've read in a file and we want
- * to extract the property table from it). Populates the
- * properties thoroughly
- *
- * @param header_block the first block to read from
- * @param properties the list to populate
- *
- * @exception IOException if anything goes wrong (which should be
- * a result of the input being NFG)
- */
- public PropertyTableBase(final HeaderBlock header_block,
- final List<Property> properties)
- throws IOException
- {
- _header_block = header_block;
- _properties = properties;
- populatePropertyTree( (DirectoryProperty)_properties.get(0));
- }
-
- /**
- * Add a property to the list of properties we manage
- *
- * @param property the new Property to manage
- */
- public void addProperty(Property property)
- {
- _properties.add(property);
- }
-
- /**
- * Remove a property from the list of properties we manage
- *
- * @param property the Property to be removed
- */
- public void removeProperty(final Property property)
- {
- _properties.remove(property);
- }
-
- /**
- * Get the root property
- *
- * @return the root property
- */
- public RootProperty getRoot()
- {
- // it's always the first element in the List
- return ( RootProperty ) _properties.get(0);
- }
-
- private void populatePropertyTree(DirectoryProperty root)
- throws IOException
- {
- int index = root.getChildIndex();
-
- if (!Property.isValidIndex(index))
- {
-
- // property has no children
- return;
- }
- Stack<Property> children = new Stack<>();
-
- children.push(_properties.get(index));
- while (!children.empty())
- {
- Property property = children.pop();
- if (property == null)
- {
- // unknown / unsupported / corrupted property, skip
- continue;
- }
-
- root.addChild(property);
- if (property.isDirectory())
- {
- populatePropertyTree(( DirectoryProperty ) property);
- }
- index = property.getPreviousChildIndex();
- if (isValidIndex(index))
- {
- children.push(_properties.get(index));
- }
- index = property.getNextChildIndex();
- if (isValidIndex(index))
- {
- children.push(_properties.get(index));
- }
- }
- }
-
- protected boolean isValidIndex(int index) {
- if (! Property.isValidIndex(index))
- return false;
- if (index < 0 || index >= _properties.size()) {
- _logger.log(POILogger.WARN, "Property index " + index +
- "outside the valid range 0.."+_properties.size());
- return false;
- }
- return true;
- }
-
- /**
- * Get the start block for the property table
- *
- * @return start block index
- */
- public int getStartBlock()
- {
- return _header_block.getPropertyStart();
- }
-
- /**
- * Set the start block for this instance
- *
- * @param index index into the array of BigBlock instances making
- * up the the filesystem
- */
- public void setStartBlock(final int index)
- {
- _header_block.setPropertyStart(index);
- }
-}
* A block of block allocation table entries. BATBlocks are created
* only through a static factory method: createBATBlocks.
*/
-public final class BATBlock extends BigBlock {
+public final class BATBlock implements BlockWritable {
+ /**
+ * Either 512 bytes ({@link POIFSConstants#SMALLER_BIG_BLOCK_SIZE})
+ * or 4096 bytes ({@link POIFSConstants#LARGER_BIG_BLOCK_SIZE})
+ */
+ private POIFSBigBlockSize bigBlockSize;
+
/**
* For a regular fat block, these are 128 / 1024
* next sector values.
*/
private BATBlock(POIFSBigBlockSize bigBlockSize)
{
- super(bigBlockSize);
+ this.bigBlockSize = bigBlockSize;
int _entries_per_block = bigBlockSize.getBATEntriesPerBlock();
_values = new int[_entries_per_block];
Arrays.fill(_values, POIFSConstants.UNUSED_BLOCK);
}
- /**
- * Create a single instance initialized (perhaps partially) with entries
- *
- * @param entries the array of block allocation table entries
- * @param start_index the index of the first entry to be written
- * to the block
- * @param end_index the index, plus one, of the last entry to be
- * written to the block (writing is for all index
- * k, start_index <= k < end_index)
- */
-
- private BATBlock(POIFSBigBlockSize bigBlockSize, final int [] entries,
- final int start_index, final int end_index)
- {
- this(bigBlockSize);
- for (int k = start_index; k < end_index; k++) {
- _values[k - start_index] = entries[k];
- }
-
- // Do we have any free sectors?
- if(end_index - start_index == _values.length) {
- recomputeFree();
- }
- }
-
private void recomputeFree() {
boolean hasFree = false;
- for(int k=0; k<_values.length; k++) {
- if(_values[k] == POIFSConstants.UNUSED_BLOCK) {
- hasFree = true;
- break;
- }
- }
+ for (int _value : _values) {
+ if (_value == POIFSConstants.UNUSED_BLOCK) {
+ hasFree = true;
+ break;
+ }
+ }
_has_free_sectors = hasFree;
}
public static BATBlock createEmptyBATBlock(final POIFSBigBlockSize bigBlockSize, boolean isXBAT) {
BATBlock block = new BATBlock(bigBlockSize);
if(isXBAT) {
- block.setXBATChain(bigBlockSize, POIFSConstants.END_OF_CHAIN);
+ final int _entries_per_xbat_block = bigBlockSize.getXBATEntriesPerBlock();
+ block._values[ _entries_per_xbat_block ] = POIFSConstants.END_OF_CHAIN;
}
return block;
}
- /**
- * Create an array of BATBlocks from an array of int block
- * allocation table entries
- *
- * @param entries the array of int entries
- *
- * @return the newly created array of BATBlocks
- */
- public static BATBlock [] createBATBlocks(final POIFSBigBlockSize bigBlockSize, final int [] entries)
- {
- int block_count = calculateStorageRequirements(bigBlockSize, entries.length);
- BATBlock[] blocks = new BATBlock[ block_count ];
- int index = 0;
- int remaining = entries.length;
-
- int _entries_per_block = bigBlockSize.getBATEntriesPerBlock();
- for (int j = 0; j < entries.length; j += _entries_per_block)
- {
- blocks[ index++ ] = new BATBlock(bigBlockSize, entries, j,
- (remaining > _entries_per_block)
- ? j + _entries_per_block
- : entries.length);
- remaining -= _entries_per_block;
- }
- return blocks;
- }
-
- /**
- * Create an array of XBATBlocks from an array of int block
- * allocation table entries
- *
- * @param entries the array of int entries
- * @param startBlock the start block of the array of XBAT blocks
- *
- * @return the newly created array of BATBlocks
- */
-
- public static BATBlock [] createXBATBlocks(final POIFSBigBlockSize bigBlockSize,
- final int [] entries,
- final int startBlock)
- {
- int block_count =
- calculateXBATStorageRequirements(bigBlockSize, entries.length);
- BATBlock[] blocks = new BATBlock[ block_count ];
- int index = 0;
- int remaining = entries.length;
-
- int _entries_per_xbat_block = bigBlockSize.getXBATEntriesPerBlock();
- if (block_count != 0)
- {
- for (int j = 0; j < entries.length; j += _entries_per_xbat_block)
- {
- blocks[ index++ ] =
- new BATBlock(bigBlockSize, entries, j,
- (remaining > _entries_per_xbat_block)
- ? j + _entries_per_xbat_block
- : entries.length);
- remaining -= _entries_per_xbat_block;
- }
- for (index = 0; index < blocks.length - 1; index++)
- {
- blocks[ index ].setXBATChain(bigBlockSize, startBlock + index + 1);
- }
- blocks[ index ].setXBATChain(bigBlockSize, POIFSConstants.END_OF_CHAIN);
- }
- return blocks;
- }
-
- /**
- * Calculate how many BATBlocks are needed to hold a specified
- * number of BAT entries.
- *
- * @param entryCount the number of entries
- *
- * @return the number of BATBlocks needed
- */
- public static int calculateStorageRequirements(final POIFSBigBlockSize bigBlockSize, final int entryCount)
- {
- int _entries_per_block = bigBlockSize.getBATEntriesPerBlock();
- return (entryCount + _entries_per_block - 1) / _entries_per_block;
- }
-
- /**
- * Calculate how many XBATBlocks are needed to hold a specified
- * number of BAT entries.
- *
- * @param entryCount the number of entries
- *
- * @return the number of XBATBlocks needed
- */
- public static int calculateXBATStorageRequirements(final POIFSBigBlockSize bigBlockSize, final int entryCount)
- {
- int _entries_per_xbat_block = bigBlockSize.getXBATEntriesPerBlock();
- return (entryCount + _entries_per_xbat_block - 1)
- / _entries_per_xbat_block;
- }
-
/**
* Calculates the maximum size of a file which is addressable given the
* number of FAT (BAT) sectors specified. (We don't care if those BAT
*/
public static BATBlockAndIndex getSBATBlockAndIndex(final int offset,
final HeaderBlock header, final List<BATBlock> sbats) {
- POIFSBigBlockSize bigBlockSize = header.getBigBlockSize();
- int entriesPerBlock = bigBlockSize.getBATEntriesPerBlock();
-
- // SBATs are so much easier, as they're chained streams
- int whichSBAT = offset / entriesPerBlock;
- int index = offset % entriesPerBlock;
- return new BATBlockAndIndex( index, sbats.get(whichSBAT) );
- }
-
- private void setXBATChain(final POIFSBigBlockSize bigBlockSize, int chainIndex)
- {
- int _entries_per_xbat_block = bigBlockSize.getXBATEntriesPerBlock();
- _values[ _entries_per_xbat_block ] = chainIndex;
+ return getBATBlockAndIndex(offset, header, sbats);
}
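The rewritten getSBATBlockAndIndex() can simply delegate to getBATBlockAndIndex() because SBAT blocks form a plain chained list: the block holding a given mini-stream offset and the slot within it are just a quotient and a remainder. A small sketch, assuming 512-byte blocks with 128 four-byte entries each:

    public class SbatLookupDemo {
        public static void main(String[] args) {
            // Assumed: 512-byte blocks and 4-byte BAT entries -> 128 entries per SBAT block
            int entriesPerBlock = 128;

            int offset = 300;                               // mini-stream block number to look up
            int whichSbat = offset / entriesPerBlock;       // 2 -> third SBAT block in the chain
            int indexInSbat = offset % entriesPerBlock;     // 44 -> slot within that block

            System.out.println(whichSbat + " / " + indexInSbat);
        }
    }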
/**
return ourBlockIndex;
}
-
- /* ********** START extension of BigBlock ********** */
-
- /**
+ /**
* Write the block's data to an OutputStream
*
* @param stream the OutputStream to which the stored data should
* @exception IOException on problems writing to the specified
* stream
*/
- void writeData(final OutputStream stream)
- throws IOException
- {
- // Save it out
- stream.write( serialize() );
+
+ public void writeBlocks(final OutputStream stream) throws IOException {
+ // Save it out
+ stream.write( serialize() );
}
-
- void writeData(final ByteBuffer block)
- throws IOException
- {
+
+ public void writeData(final ByteBuffer block) {
// Save it out
block.put( serialize() );
}
// Create the empty array
byte[] data = new byte[ bigBlockSize.getBigBlockSize() ];
- // Fill in the values
- int offset = 0;
- for(int i=0; i<_values.length; i++) {
- LittleEndian.putInt(data, offset, _values[i]);
- offset += LittleEndian.INT_SIZE;
- }
+ // Fill in the values
+ int offset = 0;
+ for (int _value : _values) {
+ LittleEndian.putInt(data, offset, _value);
+ offset += LittleEndian.INT_SIZE;
+ }
// Done
return data;
}
- /* ********** END extension of BigBlock ********** */
-
-
- public static class BATBlockAndIndex {
+ public static final class BATBlockAndIndex {
private final int index;
private final BATBlock block;
private BATBlockAndIndex(int index, BATBlock block) {
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.storage;
-
-/**
- * Abstract base class of all POIFS block storage classes. All
- * extensions of BigBlock should write 512 or 4096 bytes of data when
- * requested to write their data (as per their BigBlockSize).
- *
- * This class has package scope, as there is no reason at this time to
- * make the class public.
- *
- * @author Marc Johnson (mjohnson at apache dot org)
- */
-
-import java.io.IOException;
-import java.io.OutputStream;
-
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.common.POIFSConstants;
-
-abstract class BigBlock
- implements BlockWritable
-{
- /**
- * Either 512 bytes ({@link POIFSConstants#SMALLER_BIG_BLOCK_SIZE})
- * or 4096 bytes ({@link POIFSConstants#LARGER_BIG_BLOCK_SIZE})
- */
- protected POIFSBigBlockSize bigBlockSize;
-
- protected BigBlock(POIFSBigBlockSize bigBlockSize) {
- this.bigBlockSize = bigBlockSize;
- }
-
- /**
- * Default implementation of write for extending classes that
- * contain their data in a simple array of bytes.
- *
- * @param stream the OutputStream to which the data should be
- * written.
- * @param data the byte array of to be written.
- *
- * @exception IOException on problems writing to the specified
- * stream.
- */
-
- protected void doWriteData(final OutputStream stream, final byte [] data)
- throws IOException
- {
- stream.write(data);
- }
-
- /**
- * Write the block's data to an OutputStream
- *
- * @param stream the OutputStream to which the stored data should
- * be written
- *
- * @exception IOException on problems writing to the specified
- * stream
- */
-
- abstract void writeData(final OutputStream stream)
- throws IOException;
-
- /* ********** START implementation of BlockWritable ********** */
-
- /**
- * Write the storage to an OutputStream
- *
- * @param stream the OutputStream to which the stored data should
- * be written
- *
- * @exception IOException on problems writing to the specified
- * stream
- */
-
- public void writeBlocks(final OutputStream stream)
- throws IOException
- {
- writeData(stream);
- }
-
- /* ********** END implementation of BlockWritable ********** */
-} // end abstract class BigBlock
-
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.IOException;
-
-import java.util.*;
-
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.*;
-
-/**
- * This class manages and creates the Block Allocation Table, which is
- * basically a set of linked lists of block indices.
- * <P>
- * Each block of the filesystem has an index. The first block, the
- * header, is skipped; the first block after the header is index 0,
- * the next is index 1, and so on.
- * <P>
- * A block's index is also its index into the Block Allocation
- * Table. The entry that it finds in the Block Allocation Table is the
- * index of the next block in the linked list of blocks making up a
- * file, or it is set to -2: end of list.
- *
- * @author Marc Johnson (mjohnson at apache dot org)
- */
-public final class BlockAllocationTableReader {
- private static final POILogger _logger = POILogFactory.getLogger(BlockAllocationTableReader.class);
-
- /**
- * Maximum number size (in blocks) of the allocation table as supported by
- * POI.<br>
- *
- * This constant has been chosen to help POI identify corrupted data in the
- * header block (rather than crash immediately with {@link OutOfMemoryError}
- * ). It's not clear if the compound document format actually specifies any
- * upper limits. For files with 512 byte blocks, having an allocation table
- * of 65,335 blocks would correspond to a total file size of 4GB. Needless
- * to say, POI probably cannot handle files anywhere near that size.
- */
- private static final int MAX_BLOCK_COUNT = 65535;
- private final IntList _entries;
- private POIFSBigBlockSize bigBlockSize;
-
- /**
- * create a BlockAllocationTableReader for an existing filesystem. Side
- * effect: when this method finishes, the BAT blocks will have
- * been removed from the raw block list, and any blocks labeled as
- * 'unused' in the block allocation table will also have been
- * removed from the raw block list.
- *
- * @param block_count the number of BAT blocks making up the block
- * allocation table
- * @param block_array the array of BAT block indices from the
- * filesystem's header
- * @param xbat_count the number of XBAT blocks
- * @param xbat_index the index of the first XBAT block
- * @param raw_block_list the list of RawDataBlocks
- *
- * @exception IOException if, in trying to create the table, we
- * encounter logic errors
- */
- public BlockAllocationTableReader(POIFSBigBlockSize bigBlockSize, int block_count, int [] block_array,
- int xbat_count, int xbat_index, BlockList raw_block_list) throws IOException {
- this(bigBlockSize);
-
- sanityCheckBlockCount(block_count);
-
- // We want to get the whole of the FAT table
- // To do this:
- // * Work through raw_block_list, which points to the
- // first (up to) 109 BAT blocks
- // * Jump to the XBAT offset, and read in XBATs which
- // point to more BAT blocks
- int limit = Math.min(block_count, block_array.length);
- int block_index;
-
- // This will hold all of the BAT blocks in order
- RawDataBlock blocks[] = new RawDataBlock[ block_count ];
-
- // Process the first (up to) 109 BAT blocks
- for (block_index = 0; block_index < limit; block_index++)
- {
- // Check that the sector number of the BAT block is a valid one
- int nextOffset = block_array[ block_index ];
- if(nextOffset > raw_block_list.blockCount()) {
- throw new IOException("Your file contains " + raw_block_list.blockCount() +
- " sectors, but the initial DIFAT array at index " + block_index +
- " referenced block # " + nextOffset + ". This isn't allowed and " +
- " your file is corrupt");
- }
- // Record the sector number of this BAT block
- blocks[ block_index ] =
- ( RawDataBlock ) raw_block_list.remove(nextOffset);
- }
-
- // Process additional BAT blocks via the XBATs
- if (block_index < block_count)
- {
-
- // must have extended blocks
- if (xbat_index < 0)
- {
- throw new IOException(
- "BAT count exceeds limit, yet XBAT index indicates no valid entries");
- }
- int chain_index = xbat_index;
- int max_entries_per_block = bigBlockSize.getXBATEntriesPerBlock();
- int chain_index_offset = bigBlockSize.getNextXBATChainOffset();
-
- // Each XBAT block contains either:
- // (maximum number of sector indexes) + index of next XBAT
- // some sector indexes + FREE sectors to max # + EndOfChain
- for (int j = 0; j < xbat_count; j++)
- {
- limit = Math.min(block_count - block_index,
- max_entries_per_block);
- byte[] data = raw_block_list.remove(chain_index).getData();
- int offset = 0;
-
- for (int k = 0; k < limit; k++)
- {
- blocks[ block_index++ ] =
- ( RawDataBlock ) raw_block_list
- .remove(LittleEndian.getInt(data, offset));
- offset += LittleEndianConsts.INT_SIZE;
- }
- chain_index = LittleEndian.getInt(data, chain_index_offset);
- if (chain_index == POIFSConstants.END_OF_CHAIN)
- {
- break;
- }
- }
- }
- if (block_index != block_count)
- {
- throw new IOException("Could not find all blocks");
- }
-
- // Now that we have all of the raw data blocks which make
- // up the FAT, go through and create the indices
- setEntries(blocks, raw_block_list);
- }
-
- /**
- * create a BlockAllocationTableReader from an array of raw data blocks
- *
- * @param blocks the raw data
- * @param raw_block_list the list holding the managed blocks
- *
- * @exception IOException
- */
- BlockAllocationTableReader(POIFSBigBlockSize bigBlockSize, ListManagedBlock[] blocks, BlockList raw_block_list)
- throws IOException {
- this(bigBlockSize);
- setEntries(blocks, raw_block_list);
- }
-
- BlockAllocationTableReader(POIFSBigBlockSize bigBlockSize) {
- this.bigBlockSize = bigBlockSize;
- _entries = new IntList();
- }
-
- public static void sanityCheckBlockCount(int block_count) throws IOException {
- if (block_count <= 0) {
- throw new IOException(
- "Illegal block count; minimum count is 1, got " +
- block_count + " instead"
- );
- }
- if (block_count > MAX_BLOCK_COUNT) {
- throw new IOException(
- "Block count " + block_count +
- " is too high. POI maximum is " + MAX_BLOCK_COUNT + "."
- );
- }
- }
-
- /**
- * walk the entries from a specified point and return the
- * associated blocks. The associated blocks are removed from the
- * block list
- *
- * @param startBlock the first block in the chain
- * @param blockList the raw data block list
- *
- * @return array of ListManagedBlocks, in their correct order
- *
- * @exception IOException if there is a problem acquiring the blocks
- */
- ListManagedBlock[] fetchBlocks(int startBlock, int headerPropertiesStartBlock,
- BlockList blockList) throws IOException {
- List<ListManagedBlock> blocks = new ArrayList<>();
- int currentBlock = startBlock;
- boolean firstPass = true;
- ListManagedBlock dataBlock = null;
-
- // Process the chain from the start to the end
- // Normally we have header, data, end
- // Sometimes we have data, header, end
- // For those cases, stop at the header, not the end
- while (currentBlock != POIFSConstants.END_OF_CHAIN) {
- try {
- // Grab the data at the current block offset
- dataBlock = blockList.remove(currentBlock);
- blocks.add(dataBlock);
- // Now figure out which block we go to next
- currentBlock = _entries.get(currentBlock);
- firstPass = false;
- } catch(IOException e) {
- if(currentBlock == headerPropertiesStartBlock) {
- // Special case where things are in the wrong order
- _logger.log(POILogger.WARN, "Warning, header block comes after data blocks in POIFS block listing");
- currentBlock = POIFSConstants.END_OF_CHAIN;
- } else if(currentBlock == 0 && firstPass) {
- // Special case where the termination isn't done right
- // on an empty set
- _logger.log(POILogger.WARN, "Warning, incorrectly terminated empty data blocks in POIFS block listing (should end at -2, ended at 0)");
- currentBlock = POIFSConstants.END_OF_CHAIN;
- } else {
- // Ripple up
- throw e;
- }
- }
- }
-
- return blocks.toArray(new ListManagedBlock[blocks.size()]);
- }
-
- // methods for debugging reader
-
- /**
- * determine whether the block specified by index is used or not
- *
- * @param index index of block in question
- *
- * @return true if the specific block is used, else false
- */
- boolean isUsed(int index) {
-
- try {
- return _entries.get(index) != -1;
- } catch (IndexOutOfBoundsException e) {
- // ignored
- return false;
- }
- }
-
- /**
- * return the next block index
- *
- * @param index of the current block
- *
- * @return index of the next block (may be
- * POIFSConstants.END_OF_CHAIN, indicating end of chain
- * (duh))
- *
- * @exception IOException if the current block is unused
- */
- int getNextBlockIndex(int index) throws IOException {
- if (isUsed(index)) {
- return _entries.get(index);
- }
- throw new IOException("index " + index + " is unused");
- }
-
- /**
- * Convert an array of blocks into a set of integer indices
- *
- * @param blocks the array of blocks containing the indices
- * @param raw_blocks the list of blocks being managed. Unused
- * blocks will be eliminated from the list
- */
- private void setEntries(ListManagedBlock[] blocks, BlockList raw_blocks) throws IOException {
- int limit = bigBlockSize.getBATEntriesPerBlock();
-
- for (int block_index = 0; block_index < blocks.length; block_index++)
- {
- byte[] data = blocks[ block_index ].getData();
- int offset = 0;
-
- for (int k = 0; k < limit; k++)
- {
- int entry = LittleEndian.getInt(data, offset);
-
- if (entry == POIFSConstants.UNUSED_BLOCK)
- {
- raw_blocks.zap(_entries.size());
- }
- _entries.add(entry);
- offset += LittleEndianConsts.INT_SIZE;
- }
-
- // discard block
- blocks[ block_index ] = null;
- }
- raw_blocks.setBAT(this);
- }
-
- @Internal
- public IntList getEntries() {
- return _entries;
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.filesystem.BATManaged;
-import org.apache.poi.util.IntList;
-
-/**
- * This class manages and creates the Block Allocation Table, which is
- * basically a set of linked lists of block indices.
- * <P>
- * Each block of the filesystem has an index. The first block, the
- * header, is skipped; the first block after the header is index 0,
- * the next is index 1, and so on.
- * <P>
- * A block's index is also its index into the Block Allocation
- * Table. The entry that it finds in the Block Allocation Table is the
- * index of the next block in the linked list of blocks making up a
- * file, or it is set to -2: end of list.
- *
- * @author Marc Johnson (mjohnson at apache dot org)
- */
-public final class BlockAllocationTableWriter implements BlockWritable, BATManaged {
- private IntList _entries;
- private BATBlock[] _blocks;
- private int _start_block;
- private POIFSBigBlockSize _bigBlockSize;
-
- /**
- * create a BlockAllocationTableWriter
- */
- public BlockAllocationTableWriter(POIFSBigBlockSize bigBlockSize)
- {
- _bigBlockSize = bigBlockSize;
- _start_block = POIFSConstants.END_OF_CHAIN;
- _entries = new IntList();
- _blocks = new BATBlock[ 0 ];
- }
-
- /**
- * Create the BATBlocks we need
- *
- * @return start block index of BAT blocks
- */
- public int createBlocks()
- {
- int xbat_blocks = 0;
- int bat_blocks = 0;
-
- while (true)
- {
- int calculated_bat_blocks =
- BATBlock.calculateStorageRequirements(_bigBlockSize,
- bat_blocks
- + xbat_blocks
- + _entries.size());
- int calculated_xbat_blocks =
- HeaderBlockWriter.calculateXBATStorageRequirements(
- _bigBlockSize, calculated_bat_blocks);
-
- if ((bat_blocks == calculated_bat_blocks)
- && (xbat_blocks == calculated_xbat_blocks))
- {
-
- // stable ... we're OK
- break;
- }
- bat_blocks = calculated_bat_blocks;
- xbat_blocks = calculated_xbat_blocks;
- }
- int startBlock = allocateSpace(bat_blocks);
-
- allocateSpace(xbat_blocks);
- simpleCreateBlocks();
- return startBlock;
- }
-
- /**
- * Allocate space for a block of indices
- *
- * @param blockCount the number of blocks to allocate space for
- *
- * @return the starting index of the blocks
- */
- public int allocateSpace(final int blockCount)
- {
- int startBlock = _entries.size();
-
- if (blockCount > 0)
- {
- int limit = blockCount - 1;
- int index = startBlock + 1;
-
- for (int k = 0; k < limit; k++)
- {
- _entries.add(index++);
- }
- _entries.add(POIFSConstants.END_OF_CHAIN);
- }
- return startBlock;
- }
-
- /**
- * get the starting block
- *
- * @return the starting block index
- */
- public int getStartBlock()
- {
- return _start_block;
- }
-
- /**
- * create the BATBlocks
- */
- void simpleCreateBlocks()
- {
- _blocks = BATBlock.createBATBlocks(_bigBlockSize, _entries.toArray());
- }
-
- /**
- * Write the storage to an OutputStream
- *
- * @param stream the OutputStream to which the stored data should
- * be written
- *
- * @exception IOException on problems writing to the specified
- * stream
- */
- public void writeBlocks(final OutputStream stream)
- throws IOException
- {
- for (int j = 0; j < _blocks.length; j++)
- {
- _blocks[ j ].writeBlocks(stream);
- }
- }
-
- /**
- * Write the BAT into its associated block
- */
- public static void writeBlock(final BATBlock bat, final ByteBuffer block)
- throws IOException
- {
- bat.writeData(block);
- }
-
- /**
- * Return the number of BigBlock's this instance uses
- *
- * @return count of BigBlock instances
- */
- public int countBlocks()
- {
- return _blocks.length;
- }
-
- /**
- * Set the start block for this instance
- */
- public void setStartBlock(int start_block)
- {
- _start_block = start_block;
- }
-}
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.storage;
-
-import java.io.IOException;
-
-/**
- * Interface for lists of blocks that are mapped by block allocation
- * tables
- *
- * @author Marc Johnson (mjohnson at apache dot org
- */
-
-public interface BlockList
-{
-
- /**
- * remove the specified block from the list
- *
- * @param index the index of the specified block; if the index is
- * out of range, that's ok
- */
-
- void zap(final int index);
-
- /**
- * remove and return the specified block from the list
- *
- * @param index the index of the specified block
- *
- * @return the specified block
- *
- * @exception IOException if the index is out of range or has
- * already been removed
- */
-
- ListManagedBlock remove(final int index) throws IOException;
-
- /**
- * get the blocks making up a particular stream in the list. The
- * blocks are removed from the list.
- *
- * @param startBlock the index of the first block in the stream
- * @param headerPropertiesStartBlock the index of the first header block in the stream
- *
- * @return the stream as an array of correctly ordered blocks
- *
- * @exception IOException if blocks are missing
- */
-
- ListManagedBlock [] fetchBlocks(final int startBlock, final int headerPropertiesStartBlock)
- throws IOException;
-
- /**
- * set the associated BlockAllocationTable
- *
- * @param bat the associated BlockAllocationTable
- *
- * @exception IOException
- */
-
- void setBAT(final BlockAllocationTableReader bat) throws IOException;
-
- int blockCount();
-} // end public interface BlockList
-
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.IOException;
-
-import org.apache.poi.util.Internal;
-
-/**
- * A simple implementation of BlockList
- *
- * @author Marc Johnson (mjohnson at apache dot org
- */
-abstract class BlockListImpl implements BlockList {
- private ListManagedBlock[] _blocks;
- private BlockAllocationTableReader _bat;
-
- protected BlockListImpl()
- {
- _blocks = new ListManagedBlock[ 0 ];
- _bat = null;
- }
-
- /**
- * provide blocks to manage
- *
- * @param blocks blocks to be managed
- */
- protected void setBlocks(final ListManagedBlock [] blocks)
- {
- _blocks = blocks.clone();
- }
-
- /**
- * remove the specified block from the list
- *
- * @param index the index of the specified block; if the index is
- * out of range, that's ok
- */
- public void zap(final int index)
- {
- if ((index >= 0) && (index < _blocks.length))
- {
- _blocks[ index ] = null;
- }
- }
-
- /**
- * Internal method. Gets, without sanity checks or
- * removing.
- */
- @Internal
- public ListManagedBlock get(final int index) {
- return _blocks[index];
- }
-
- /**
- * remove and return the specified block from the list
- *
- * @param index the index of the specified block
- *
- * @return the specified block
- *
- * @exception IOException if the index is out of range or has
- * already been removed
- */
- public ListManagedBlock remove(final int index)
- throws IOException
- {
- ListManagedBlock result = null;
-
- try
- {
- result = _blocks[ index ];
- if (result == null)
- {
- throw new IOException(
- "block[ " + index + " ] already removed - " +
- "does your POIFS have circular or duplicate block references?"
- );
- }
- _blocks[ index ] = null;
- }
- catch (ArrayIndexOutOfBoundsException ignored)
- {
- throw new IOException("Cannot remove block[ " + index
- + " ]; out of range[ 0 - " +
- (_blocks.length-1) + " ]");
- }
- return result;
- }
-
- /**
- * get the blocks making up a particular stream in the list. The
- * blocks are removed from the list.
- *
- * @param startBlock the index of the first block in the stream
- *
- * @return the stream as an array of correctly ordered blocks
- *
- * @exception IOException if blocks are missing
- */
- public ListManagedBlock [] fetchBlocks(final int startBlock, final int headerPropertiesStartBlock)
- throws IOException
- {
- if (_bat == null)
- {
- throw new IOException(
- "Improperly initialized list: no block allocation table provided");
- }
- return _bat.fetchBlocks(startBlock, headerPropertiesStartBlock, this);
- }
-
- /**
- * set the associated BlockAllocationTable
- *
- * @param bat the associated BlockAllocationTable
- */
- public void setBAT(final BlockAllocationTableReader bat)
- throws IOException
- {
- if (_bat != null)
- {
- throw new IOException(
- "Attempt to replace existing BlockAllocationTable");
- }
- _bat = bat;
- }
-
- /**
- * Returns the count of the number of blocks
- */
- public int blockCount() {
- return _blocks.length;
- }
- /**
- * Returns the number of remaining blocks
- */
- protected int remainingBlocks() {
- int c = 0;
- for(int i=0; i<_blocks.length; i++) {
- if(_blocks[i] != null) c++;
- }
- return c;
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-/**
- * Wraps a <tt>byte</tt> array and provides simple data input access.
- * Internally, this class maintains a buffer read index, so that for the most part, primitive
- * data can be read in a data-input-stream-like manner.<p>
- *
- * Note - the calling class should call the {@link #available()} method to detect end-of-buffer
- * and move to the next data block when the current is exhausted.
- * For optimisation reasons, no error handling is performed in this class. Thus, mistakes in
- * calling code ran may raise ugly exceptions here, like {@link ArrayIndexOutOfBoundsException},
- * etc .<p>
- *
- * The multi-byte primitive input methods ({@link #readUShortLE()}, {@link #readIntLE()} and
- * {@link #readLongLE()}) have corresponding 'spanning read' methods which (when required) perform
- * a read across the block boundary. These spanning read methods take the previous
- * {@link DataInputBlock} as a parameter.
- * Reads of larger amounts of data (into <tt>byte</tt> array buffers) must be managed by the caller
- * since these could conceivably involve more than two blocks.
- *
- * @author Josh Micich
- */
-public final class DataInputBlock {
-
- /**
- * Possibly any size (usually 512K or 64K). Assumed to be at least 8 bytes for all blocks
- * before the end of the stream. The last block in the stream can be any size except zero.
- */
- private final byte[] _buf;
- private int _readIndex;
- private int _maxIndex;
-
- DataInputBlock(byte[] data, int startOffset) { // NOSONAR
- _buf = data;
- _readIndex = startOffset;
- _maxIndex = _buf.length;
- }
- public int available() {
- return _maxIndex-_readIndex;
- }
-
- public int readUByte() {
- return _buf[_readIndex++] & 0xFF;
- }
-
- /**
- * Reads a <tt>short</tt> which was encoded in <em>little endian</em> format.
- */
- public int readUShortLE() {
- int i = _readIndex;
-
- int b0 = _buf[i++] & 0xFF;
- int b1 = _buf[i++] & 0xFF;
- _readIndex = i;
- return (b1 << 8) + (b0 << 0);
- }
-
- /**
- * Reads a <tt>short</tt> which spans the end of <tt>prevBlock</tt> and the start of this block.
- */
- public int readUShortLE(DataInputBlock prevBlock) {
- // simple case - will always be one byte in each block
- int i = prevBlock._buf.length-1;
-
- int b0 = prevBlock._buf[i] & 0xFF;
- int b1 = _buf[_readIndex++] & 0xFF;
- return (b1 << 8) + (b0 << 0);
- }
-
- /**
- * Reads an <tt>int</tt> which was encoded in <em>little endian</em> format.
- */
- public int readIntLE() {
- int i = _readIndex;
-
- int b0 = _buf[i++] & 0xFF;
- int b1 = _buf[i++] & 0xFF;
- int b2 = _buf[i++] & 0xFF;
- int b3 = _buf[i++] & 0xFF;
- _readIndex = i;
- return (b3 << 24) + (b2 << 16) + (b1 << 8) + (b0 << 0);
- }
-
- /**
- * Reads an <tt>int</tt> which spans the end of <tt>prevBlock</tt> and the start of this block.
- */
- public int readIntLE(DataInputBlock prevBlock, int prevBlockAvailable) {
- byte[] buf = new byte[4];
-
- readSpanning(prevBlock, prevBlockAvailable, buf);
- int b0 = buf[0] & 0xFF;
- int b1 = buf[1] & 0xFF;
- int b2 = buf[2] & 0xFF;
- int b3 = buf[3] & 0xFF;
- return (b3 << 24) + (b2 << 16) + (b1 << 8) + (b0 << 0);
- }
-
- /**
- * Reads a <tt>long</tt> which was encoded in <em>little endian</em> format.
- */
- public long readLongLE() {
- int i = _readIndex;
-
- int b0 = _buf[i++] & 0xFF;
- int b1 = _buf[i++] & 0xFF;
- int b2 = _buf[i++] & 0xFF;
- int b3 = _buf[i++] & 0xFF;
- int b4 = _buf[i++] & 0xFF;
- int b5 = _buf[i++] & 0xFF;
- int b6 = _buf[i++] & 0xFF;
- int b7 = _buf[i++] & 0xFF;
- _readIndex = i;
- return (((long)b7 << 56) +
- ((long)b6 << 48) +
- ((long)b5 << 40) +
- ((long)b4 << 32) +
- ((long)b3 << 24) +
- (b2 << 16) +
- (b1 << 8) +
- (b0 << 0));
- }
-
- /**
- * Reads a <tt>long</tt> which spans the end of <tt>prevBlock</tt> and the start of this block.
- */
- public long readLongLE(DataInputBlock prevBlock, int prevBlockAvailable) {
- byte[] buf = new byte[8];
-
- readSpanning(prevBlock, prevBlockAvailable, buf);
-
- int b0 = buf[0] & 0xFF;
- int b1 = buf[1] & 0xFF;
- int b2 = buf[2] & 0xFF;
- int b3 = buf[3] & 0xFF;
- int b4 = buf[4] & 0xFF;
- int b5 = buf[5] & 0xFF;
- int b6 = buf[6] & 0xFF;
- int b7 = buf[7] & 0xFF;
- return (((long)b7 << 56) +
- ((long)b6 << 48) +
- ((long)b5 << 40) +
- ((long)b4 << 32) +
- ((long)b3 << 24) +
- (b2 << 16) +
- (b1 << 8) +
- (b0 << 0));
- }
-
- /**
- * Reads a small amount of data from across the boundary between two blocks.
- * The {@link #_readIndex} of this (the second) block is updated accordingly.
- * Note- this method (and other code) assumes that the second {@link DataInputBlock}
- * always is big enough to complete the read without being exhausted.
- */
- private void readSpanning(DataInputBlock prevBlock, int prevBlockAvailable, byte[] buf) {
- System.arraycopy(prevBlock._buf, prevBlock._readIndex, buf, 0, prevBlockAvailable);
- int secondReadLen = buf.length-prevBlockAvailable;
- System.arraycopy(_buf, 0, buf, prevBlockAvailable, secondReadLen);
- _readIndex = secondReadLen;
- }
-
- /**
- * Reads <tt>len</tt> bytes from this block into the supplied buffer.
- */
- public void readFully(byte[] buf, int off, int len) {
- System.arraycopy(_buf, _readIndex, buf, off, len);
- _readIndex += len;
- }
-}
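
The hand-rolled little-endian decoding in DataInputBlock can be expressed with java.nio for readers who want the equivalent behaviour outside POI; this is an illustrative sketch only (hypothetical class and method names, not POI API):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

final class LittleEndianSketch {
    /** Equivalent of readIntLE(): 4 bytes, least significant byte first. */
    static int readIntLE(byte[] block, int offset) {
        return ByteBuffer.wrap(block, offset, 4).order(ByteOrder.LITTLE_ENDIAN).getInt();
    }

    /** Equivalent of readLongLE(): 8 bytes, least significant byte first. */
    static long readLongLE(byte[] block, int offset) {
        return ByteBuffer.wrap(block, offset, 8).order(ByteOrder.LITTLE_ENDIAN).getLong();
    }
}

The spanning-read variants above have no single-buffer equivalent: a value that crosses a block boundary must first be spliced into one temporary array, which is exactly what readSpanning() did.
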
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.Arrays;
-
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.IOUtils;
-
-/**
- * A block of document data.
- *
- * @author Marc Johnson (mjohnson at apache dot org)
- */
-public final class DocumentBlock extends BigBlock {
-
- //arbitrarily selected; may need to increase
- private static final int MAX_RECORD_LENGTH = 100_000;
-
- private static final byte _default_value = ( byte ) 0xFF;
- private byte[] _data;
- private int _bytes_read;
-
- /**
- * create a document block from a raw data block
- *
- * @param block the raw data block
- *
- * @exception IOException
- */
-
- public DocumentBlock(final RawDataBlock block)
- throws IOException
- {
- super(
- block.getBigBlockSize() == POIFSConstants.SMALLER_BIG_BLOCK_SIZE ?
- POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS :
- POIFSConstants.LARGER_BIG_BLOCK_SIZE_DETAILS
- );
- _data = block.getData();
- _bytes_read = _data.length;
- }
-
- /**
- * Create a single instance initialized with data.
- *
- * @param stream the InputStream delivering the data.
- *
- * @exception IOException
- */
-
- public DocumentBlock(final InputStream stream, POIFSBigBlockSize bigBlockSize)
- throws IOException
- {
- this(bigBlockSize);
- int count = IOUtils.readFully(stream, _data);
-
- _bytes_read = (count == -1) ? 0
- : count;
- }
-
- /**
- * Create a single instance initialized with default values
- */
-
- private DocumentBlock(POIFSBigBlockSize bigBlockSize)
- {
- super(bigBlockSize);
- _data = IOUtils.safelyAllocate(bigBlockSize.getBigBlockSize(), MAX_RECORD_LENGTH);
- Arrays.fill(_data, _default_value);
- }
-
- /**
- * Get the number of bytes read for this block
- *
- * @return bytes read into the block
- */
-
- public int size()
- {
- return _bytes_read;
- }
-
- /**
- * Was this a partially read block?
- *
- * @return true if the block was only partially filled with data
- */
-
- public boolean partiallyRead()
- {
- return _bytes_read != bigBlockSize.getBigBlockSize();
- }
-
- /**
- * @return the fill byte used
- */
-
- public static byte getFillByte()
- {
- return _default_value;
- }
-
- /**
- * convert a single long array into an array of DocumentBlock
- * instances
- *
- * @param array the byte array to be converted
- * @param size the intended size of the array (which may be smaller)
- *
- * @return an array of DocumentBlock instances, filled from the
- * input array
- */
-
- public static DocumentBlock [] convert(final POIFSBigBlockSize bigBlockSize,
- final byte [] array,
- final int size)
- {
- DocumentBlock[] rval =
- new DocumentBlock[ (size + bigBlockSize.getBigBlockSize() - 1) / bigBlockSize.getBigBlockSize() ];
- int offset = 0;
-
- for (int k = 0; k < rval.length; k++)
- {
- rval[ k ] = new DocumentBlock(bigBlockSize);
- if (offset < array.length)
- {
- int length = Math.min(bigBlockSize.getBigBlockSize(),
- array.length - offset);
-
- System.arraycopy(array, offset, rval[ k ]._data, 0, length);
- if (length != bigBlockSize.getBigBlockSize())
- {
- Arrays.fill(rval[ k ]._data, length,
- bigBlockSize.getBigBlockSize(),
- _default_value);
- }
- }
- else
- {
- Arrays.fill(rval[ k ]._data, _default_value);
- }
- offset += bigBlockSize.getBigBlockSize();
- }
- return rval;
- }
-
- public static DataInputBlock getDataInputBlock(DocumentBlock[] blocks, int offset) {
- if(blocks == null || blocks.length == 0) {
- return null;
- }
-
- // Key things about the size of the block
- POIFSBigBlockSize bigBlockSize = blocks[0].bigBlockSize;
- int BLOCK_SHIFT = bigBlockSize.getHeaderValue();
- int BLOCK_SIZE = bigBlockSize.getBigBlockSize();
- int BLOCK_MASK = BLOCK_SIZE - 1;
-
- // Now do the offset lookup
- int firstBlockIndex = offset >> BLOCK_SHIFT;
- int firstBlockOffset= offset & BLOCK_MASK;
- return new DataInputBlock(blocks[firstBlockIndex]._data, firstBlockOffset);
- }
-
- /* ********** START extension of BigBlock ********** */
-
- /**
- * Write the block's data to an OutputStream
- *
- * @param stream the OutputStream to which the stored data should
- * be written
- *
- * @exception IOException on problems writing to the specified
- * stream
- */
-
- void writeData(final OutputStream stream)
- throws IOException
- {
- doWriteData(stream, _data);
- }
-
- /* ********** END extension of BigBlock ********** */
-} // end public class DocumentBlock
-
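
With DocumentBlock gone, callers no longer touch document data block by block; the stream is read through the filesystem API instead. A hedged usage sketch (the class name is made up, "WordDocument" is only an example stream name, and args[0] is assumed to be an OLE2 file):

import java.io.File;
import java.io.InputStream;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class ReadStreamSketch {
    public static void main(String[] args) throws Exception {
        try (POIFSFileSystem fs = new POIFSFileSystem(new File(args[0]), true);   // read-only
             InputStream is = fs.createDocumentInputStream("WordDocument")) {
            byte[] buf = new byte[4096];
            int n;
            while ((n = is.read(buf)) != -1) {
                // process n bytes of the stream here
            }
        }
    }
}
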
byte[] data = new byte[512];
int bsCount = IOUtils.readFully(stream, data);
if(bsCount != 512) {
- throw alertShortRead(bsCount, 512);
+ throw alertShortRead(bsCount);
}
return data;
}
- private static IOException alertShortRead(int pRead, int expectedReadSize) {
+ private static IOException alertShortRead(int pRead) {
int read;
        if (pRead < 0) {
            //Can't have -1 bytes read in the error message!
            read = 0;
        } else {
            read = pRead;
        }
        String type = " byte" + (read == 1 ? (""): ("s"));
return new IOException("Unable to read entire header; "
- + read + type + " read; expected "
- + expectedReadSize + " bytes");
+ + read + type + " read; expected 512 bytes");
}
/**
* @exception IOException on problems writing to the specified
* stream
*/
- void writeData(final OutputStream stream) throws IOException {
+ public void writeData(final OutputStream stream) throws IOException {
// Update the counts and start positions
new IntegerField(_bat_count_offset, _bat_count, _data);
new IntegerField(_property_start_offset, _property_start, _data);
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.storage;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.common.POIFSConstants;
-
-/**
- * The block containing the archive header
- *
- * @author Marc Johnson (mjohnson at apache dot org)
- */
-public class HeaderBlockWriter implements HeaderBlockConstants, BlockWritable
-{
- private final HeaderBlock _header_block;
-
- /**
- * Create a single instance initialized with default values
- */
- public HeaderBlockWriter(POIFSBigBlockSize bigBlockSize)
- {
- _header_block = new HeaderBlock(bigBlockSize);
- }
-
- /**
- * Create a single instance initialized with the specified
- * existing values
- */
- public HeaderBlockWriter(HeaderBlock headerBlock)
- {
- _header_block = headerBlock;
- }
-
- /**
- * Set BAT block parameters. Assumes that all BAT blocks are
- * contiguous. Will construct XBAT blocks if necessary and return
- * the array of newly constructed XBAT blocks.
- *
- * @param blockCount count of BAT blocks
- * @param startBlock index of first BAT block
- *
- * @return array of XBAT blocks; may be zero length, will not be
- * null
- */
-
- public BATBlock [] setBATBlocks(final int blockCount,
- final int startBlock)
- {
- BATBlock[] rvalue;
- POIFSBigBlockSize bigBlockSize = _header_block.getBigBlockSize();
-
- _header_block.setBATCount(blockCount);
-
- // Set the BAT locations
- int limit = Math.min(blockCount, _max_bats_in_header);
- int[] bat_blocks = new int[limit];
- for (int j = 0; j < limit; j++) {
- bat_blocks[j] = startBlock + j;
- }
- _header_block.setBATArray(bat_blocks);
-
- // Now do the XBATs
- if (blockCount > _max_bats_in_header)
- {
- int excess_blocks = blockCount - _max_bats_in_header;
- int[] excess_block_array = new int[ excess_blocks ];
-
- for (int j = 0; j < excess_blocks; j++)
- {
- excess_block_array[ j ] = startBlock + j
- + _max_bats_in_header;
- }
- rvalue = BATBlock.createXBATBlocks(bigBlockSize, excess_block_array,
- startBlock + blockCount);
- _header_block.setXBATStart(startBlock + blockCount);
- }
- else
- {
- rvalue = BATBlock.createXBATBlocks(bigBlockSize, new int[ 0 ], 0);
- _header_block.setXBATStart(POIFSConstants.END_OF_CHAIN);
- }
- _header_block.setXBATCount(rvalue.length);
- return rvalue;
- }
-
- /**
- * Set start of Property Table
- *
- * @param startBlock the index of the first block of the Property
- * Table
- */
- public void setPropertyStart(final int startBlock)
- {
- _header_block.setPropertyStart(startBlock);
- }
-
- /**
- * Set start of small block allocation table
- *
- * @param startBlock the index of the first big block of the small
- * block allocation table
- */
- public void setSBATStart(final int startBlock)
- {
- _header_block.setSBATStart(startBlock);
- }
-
- /**
- * Set count of SBAT blocks
- *
- * @param count the number of SBAT blocks
- */
- public void setSBATBlockCount(final int count)
- {
- _header_block.setSBATBlockCount(count);
- }
-
- /**
- * For a given number of BAT blocks, calculate how many XBAT
- * blocks will be needed
- *
- * @param blockCount number of BAT blocks
- *
- * @return number of XBAT blocks needed
- */
-
- static int calculateXBATStorageRequirements(POIFSBigBlockSize bigBlockSize, final int blockCount)
- {
- return (blockCount > _max_bats_in_header)
- ? BATBlock.calculateXBATStorageRequirements(
- bigBlockSize, blockCount - _max_bats_in_header)
- : 0;
- }
-
- /* ********** START extension of BigBlock ********** */
-
- /**
- * Write the block's data to an OutputStream
- *
- * @param stream the OutputStream to which the stored data should
- * be written
- *
- * @exception IOException on problems writing to the specified
- * stream
- */
- public void writeBlocks(final OutputStream stream)
- throws IOException
- {
- _header_block.writeData(stream);
- }
-
- /**
- * Write the block's data to an existing block
- *
- * @param block the ByteBuffer of the block to which the
- * stored data should be written
- *
- * @exception IOException on problems writing to the block
- */
- public void writeBlock(ByteBuffer block)
- throws IOException
- {
- ByteArrayOutputStream baos = new ByteArrayOutputStream(
- _header_block.getBigBlockSize().getBigBlockSize()
- );
- _header_block.writeData(baos);
-
- block.put(baos.toByteArray());
- }
-
- /* ********** END extension of BigBlock ********** */
-} // end public class HeaderBlockWriter
-
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.storage;
-
-import java.io.IOException;
-
-/**
- * An interface for blocks managed by a list that works with a
- * BlockAllocationTable to keep block sequences straight
- *
- * @author Marc Johnson (mjohnson at apache dot org
- */
-
-public interface ListManagedBlock
-{
-
- /**
- * Get the data from the block
- *
- * @return the block's data as a byte array
- *
- * @exception IOException if there is no data
- */
-
- public byte [] getData()
- throws IOException;
-} // end public interface ListManagedBlock
-
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.List;
-
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.property.Property;
-
-/**
- * A block of Property instances
- *
- * @author Marc Johnson (mjohnson at apache dot org)
- */
-public final class PropertyBlock extends BigBlock {
- private Property[] _properties;
-
- /**
- * Create a single instance initialized with default values
- *
- * @param properties the properties to be inserted
- * @param offset the offset into the properties array
- */
-
- private PropertyBlock(final POIFSBigBlockSize bigBlockSize, final Property [] properties, final int offset)
- {
- super(bigBlockSize);
-
- _properties = new Property[ bigBlockSize.getPropertiesPerBlock() ];
- for (int j = 0; j < _properties.length; j++)
- {
- _properties[ j ] = properties[ j + offset ];
- }
- }
-
- /**
- * Create an array of PropertyBlocks from an array of Property
- * instances, creating empty Property instances to make up any
- * shortfall
- *
- * @param properties the Property instances to be converted into
- * PropertyBlocks, in a java List
- *
- * @return the array of newly created PropertyBlock instances
- */
-
- public static BlockWritable [] createPropertyBlockArray(
- final POIFSBigBlockSize bigBlockSize, final List<Property> properties)
- {
- int _properties_per_block = bigBlockSize.getPropertiesPerBlock();
- int block_count =
- (properties.size() + _properties_per_block - 1)
- / _properties_per_block;
- Property[] to_be_written =
- new Property[ block_count * _properties_per_block ];
-
- System.arraycopy(properties.toArray(new Property[ 0 ]), 0,
- to_be_written, 0, properties.size());
- for (int j = properties.size(); j < to_be_written.length; j++)
- {
-
- // create an instance of an anonymous inner class that
- // extends Property
- to_be_written[ j ] = new Property()
- {
- protected void preWrite()
- {
- }
-
- public boolean isDirectory()
- {
- return false;
- }
- };
- }
- BlockWritable[] rvalue = new BlockWritable[ block_count ];
-
- for (int j = 0; j < block_count; j++)
- {
- rvalue[ j ] = new PropertyBlock(bigBlockSize, to_be_written,
- j * _properties_per_block);
- }
- return rvalue;
- }
-
- /* ********** START extension of BigBlock ********** */
-
- /**
- * Write the block's data to an OutputStream
- *
- * @param stream the OutputStream to which the stored data should
- * be written
- *
- * @exception IOException on problems writing to the specified
- * stream
- */
-
- void writeData(final OutputStream stream)
- throws IOException
- {
- int _properties_per_block = bigBlockSize.getPropertiesPerBlock();
- for (int j = 0; j < _properties_per_block; j++)
- {
- _properties[ j ].writeData(stream);
- }
- }
-
- /* ********** END extension of BigBlock ********** */
-} // end public class PropertyBlock
-
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.storage;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.IOUtils;
-import org.apache.poi.util.POILogFactory;
-import org.apache.poi.util.POILogger;
-
-import java.io.*;
-
-/**
- * A big block created from an InputStream, holding the raw data
- *
- * @author Marc Johnson (mjohnson at apache dot org
- */
-
-public class RawDataBlock
- implements ListManagedBlock
-{
- //arbitrarily selected; may need to increase
- private static final int MAX_RECORD_LENGTH = 100_000;
-
- private byte[] _data;
- private boolean _eof;
- private boolean _hasData;
- static POILogger log = POILogFactory.getLogger(RawDataBlock.class);
-
- /**
- * Constructor RawDataBlock
- *
- * @param stream the InputStream from which the data will be read
- *
- * @exception IOException on I/O errors, and if an insufficient
- * amount of data is read (the InputStream must
- * be an exact multiple of the block size)
- */
- public RawDataBlock(final InputStream stream)
- throws IOException {
- this(stream, POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
- }
- /**
- * Constructor RawDataBlock
- *
- * @param stream the InputStream from which the data will be read
- * @param blockSize the size of the POIFS blocks, normally 512 bytes
- * {@link org.apache.poi.poifs.common.POIFSConstants#SMALLER_BIG_BLOCK_SIZE}
- *
- * @exception IOException on I/O errors, and if an insufficient
- * amount of data is read (the InputStream must
- * be an exact multiple of the block size)
- */
- public RawDataBlock(final InputStream stream, int blockSize)
- throws IOException {
- _data = IOUtils.safelyAllocate(blockSize, MAX_RECORD_LENGTH);
- int count = IOUtils.readFully(stream, _data);
- _hasData = (count > 0);
-
- if (count == -1) {
- _eof = true;
- }
- else if (count != blockSize) {
- // IOUtils.readFully will always read the
- // requested number of bytes, unless it hits
- // an EOF
- _eof = true;
- String type = " byte" + ((count == 1) ? ("")
- : ("s"));
-
- log.log(POILogger.ERROR,
- "Unable to read entire block; " + count
- + type + " read before EOF; expected "
- + blockSize + " bytes. Your document "
- + "was either written by software that "
- + "ignores the spec, or has been truncated!"
- );
- }
- else {
- _eof = false;
- }
- }
-
- /**
- * When we read the data, did we hit end of file?
- *
- * @return true if the EoF was hit during this block, or
- * false if not. If you have a dodgy short last block, then
- * it's possible to both have data, and also hit EoF...
- */
- public boolean eof() {
- return _eof;
- }
- /**
- * Did we actually find any data to read? It's possible,
- * in the event of a short last block, to both have hit
- * the EoF, but also to have data
- */
- public boolean hasData() {
- return _hasData;
- }
-
- public String toString() {
- return "RawDataBlock of size " + _data.length;
- }
-
- /* ********** START implementation of ListManagedBlock ********** */
-
- /**
- * Get the data from the block
- *
- * @return the block's data as a byte array
- *
- * @exception IOException if there is no data
- */
- public byte [] getData()
- throws IOException
- {
- if (! hasData())
- {
- throw new IOException("Cannot return empty data");
- }
- return _data;
- }
-
- /**
- * What's the big block size?
- */
- public int getBigBlockSize() {
- return _data.length;
- }
-
- /* ********** END implementation of ListManagedBlock ********** */
-} // end public class RawDataBlock
-
+++ /dev/null
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.poifs.storage;
-
-import java.io.*;
-
-import java.util.*;
-
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-
-/**
- * A list of RawDataBlocks instances, and methods to manage the list
- *
- * @author Marc Johnson (mjohnson at apache dot org
- */
-
-public class RawDataBlockList
- extends BlockListImpl
-{
-
- /**
- * Constructor RawDataBlockList
- *
- * @param stream the InputStream from which the data will be read
- * @param bigBlockSize The big block size, either 512 bytes or 4096 bytes
- *
- * @exception IOException on I/O errors, and if an incomplete
- * block is read
- */
-
- public RawDataBlockList(final InputStream stream, POIFSBigBlockSize bigBlockSize)
- throws IOException
- {
- List<RawDataBlock> blocks = new ArrayList<>();
-
- while (true)
- {
- RawDataBlock block = new RawDataBlock(stream, bigBlockSize.getBigBlockSize());
-
- // If there was data, add the block to the list
- if(block.hasData()) {
- blocks.add(block);
- }
-
- // If the stream is now at the End Of File, we're done
- if (block.eof()) {
- break;
- }
- }
- setBlocks( blocks.toArray(new RawDataBlock[ blocks.size() ]) );
- }
-} // end public class RawDataBlockList
-
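
RawDataBlockList eagerly pulled every block of the file out of an InputStream; the replacement is to let POIFSFileSystem read blocks on demand. A hedged sketch of the two ways to open a filesystem (class name is made up, args[0] is assumed to be an OLE2 file); the File-backed form is the low-memory one, while the stream form buffers the whole input first:

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class OpenPoifsSketch {
    public static void main(String[] args) throws Exception {
        // Backed by the file itself: blocks are read as they are needed
        try (POIFSFileSystem fs = new POIFSFileSystem(new File(args[0]), true)) {
            System.out.println(fs.getRoot().getEntryCount() + " entries in the root");
        }

        // Backed by a stream: the whole content is buffered in memory first
        try (InputStream is = new FileInputStream(args[0]);
             POIFSFileSystem fs = new POIFSFileSystem(is)) {
            System.out.println(fs.getRoot().getEntryCount() + " entries in the root");
        }
    }
}
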
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentFactoryHelper;
import org.apache.poi.poifs.filesystem.FileMagic;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.util.IOUtils;
/**
* Creates a SlideShow from the given NPOIFSFileSystem.
*
- * @param fs The {@link NPOIFSFileSystem} to read the document from
+ * @param fs The {@link POIFSFileSystem} to read the document from
*
* @return The created SlideShow
*
public static <
S extends Shape<S,P>,
P extends TextParagraph<S,P,? extends TextRun>
- > SlideShow<S,P> create(NPOIFSFileSystem fs) throws IOException {
+ > SlideShow<S,P> create(POIFSFileSystem fs) throws IOException {
return create(fs, null);
}
* Creates a SlideShow from the given NPOIFSFileSystem, which may
* be password protected
*
- * @param fs The {@link NPOIFSFileSystem} to read the document from
+ * @param fs The {@link POIFSFileSystem} to read the document from
* @param password The password that should be used or null if no password is necessary.
*
* @return The created SlideShow
public static <
S extends Shape<S,P>,
P extends TextParagraph<S,P,? extends TextRun>
- > SlideShow<S,P> create(final NPOIFSFileSystem fs, String password) throws IOException {
+ > SlideShow<S,P> create(final POIFSFileSystem fs, String password) throws IOException {
return create(fs.getRoot(), password);
}
switch (fm) {
case OLE2:
- NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
+ POIFSFileSystem fs = new POIFSFileSystem(is);
return create(fs, password);
case OOXML:
return createXSLFSlideShow(is);
throw new FileNotFoundException(file.toString());
}
- NPOIFSFileSystem fs = null;
+ POIFSFileSystem fs = null;
try {
- fs = new NPOIFSFileSystem(file, readOnly);
+ fs = new POIFSFileSystem(file, readOnly);
return create(fs, password);
} catch(OfficeXmlFileException e) {
IOUtils.closeQuietly(fs);
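
For callers, the net effect of the SlideShowFactory change is only the parameter type. A hedged usage sketch (class name and file name are placeholders; closing the filesystem the show was created from is assumed to be sufficient cleanup here):

import java.io.File;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.usermodel.SlideShow;
import org.apache.poi.sl.usermodel.SlideShowFactory;

public class OpenSlideShowSketch {
    public static void main(String[] args) throws Exception {
        try (POIFSFileSystem fs = new POIFSFileSystem(new File("deck.ppt"), true)) {
            SlideShow<?, ?> show = SlideShowFactory.create(fs);
            System.out.println(show.getSlides().size() + " slides");
        }
    }
}
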
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentFactoryHelper;
import org.apache.poi.poifs.filesystem.FileMagic;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Removal;
* Note that in order to properly release resources the
* Workbook should be closed after use.
*
- * @param fs The {@link NPOIFSFileSystem} to read the document from
+ * @param fs The {@link POIFSFileSystem} to read the document from
*
* @return The created workbook
*
* @throws IOException if an error occurs while reading the data
*/
- public static Workbook create(NPOIFSFileSystem fs) throws IOException {
+ public static Workbook create(POIFSFileSystem fs) throws IOException {
return create(fs, null);
}
* Creates a Workbook from the given NPOIFSFileSystem, which may
* be password protected
*
- * @param fs The {@link NPOIFSFileSystem} to read the document from
+ * @param fs The {@link POIFSFileSystem} to read the document from
* @param password The password that should be used or null if no password is necessary.
*
* @return The created Workbook
*
* @throws IOException if an error occurs while reading the data
*/
- private static Workbook create(final NPOIFSFileSystem fs, String password) throws IOException {
+ private static Workbook create(final POIFSFileSystem fs, String password) throws IOException {
return create(fs.getRoot(), password);
}
switch (fm) {
case OLE2:
- NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
+ POIFSFileSystem fs = new POIFSFileSystem(is);
return create(fs, password);
case OOXML:
return createXSSFWorkbook(is);
throw new FileNotFoundException(file.toString());
}
- NPOIFSFileSystem fs = null;
+ POIFSFileSystem fs = null;
try {
- fs = new NPOIFSFileSystem(file, readOnly);
+ fs = new POIFSFileSystem(file, readOnly);
return create(fs, password);
} catch(OfficeXmlFileException e) {
IOUtils.closeQuietly(fs);
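
The WorkbookFactory signatures change the same way. A hedged usage sketch (class name and file name are placeholders; the password argument may be null when the file is not protected):

import java.io.File;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;

public class OpenWorkbookSketch {
    public static void main(String[] args) throws Exception {
        try (Workbook wb = WorkbookFactory.create(new File("report.xls"), null, true)) {
            System.out.println(wb.getNumberOfSheets() + " sheets");
        }
    }
}
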
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.Sheet;
/**
* Dump out the aggregated escher records
*/
-public class DrawingDump
-{
+public final class DrawingDump {
+ private DrawingDump() {
+ }
+
public static void main( String[] args ) throws IOException {
OutputStreamWriter osw = new OutputStreamWriter(System.out, Charset.defaultCharset());
PrintWriter pw = new PrintWriter(osw);
- NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(args[0]));
+ POIFSFileSystem fs = new POIFSFileSystem(new File(args[0]));
HSSFWorkbook wb = new HSSFWorkbook(fs);
try {
pw.println( "Drawing group:" );
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.FileMagic;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.NotOLE2FileException;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
-import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.NotImplemented;
@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(File f) throws IOException, OpenXML4JException, XmlException {
- NPOIFSFileSystem fs = null;
+ POIFSFileSystem fs = null;
try {
- fs = new NPOIFSFileSystem(f);
+ fs = new POIFSFileSystem(f);
if (fs.getRoot().hasEntry(Decryptor.DEFAULT_POIFS_ENTRY)) {
return (T)createEncryptedOOXMLExtractor(fs);
}
switch (fm) {
case OLE2:
- NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
+ POIFSFileSystem fs = new POIFSFileSystem(is);
boolean isEncrypted = fs.getRoot().hasEntry(Decryptor.DEFAULT_POIFS_ENTRY);
return isEncrypted ? createEncryptedOOXMLExtractor(fs) : createExtractor(fs);
case OOXML:
public static <T extends POITextExtractor> T createExtractor(POIFSFileSystem fs) throws IOException, OpenXML4JException, XmlException {
return createExtractor(fs.getRoot());
}
- public static <T extends POITextExtractor> T createExtractor(NPOIFSFileSystem fs) throws IOException, OpenXML4JException, XmlException {
- return createExtractor(fs.getRoot());
- }
@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(DirectoryNode poifsDir) throws IOException, OpenXML4JException, XmlException
throw new IllegalStateException("Not yet supported");
}
- private static POITextExtractor createEncryptedOOXMLExtractor(NPOIFSFileSystem fs)
+ private static POITextExtractor createEncryptedOOXMLExtractor(POIFSFileSystem fs)
throws IOException {
String pass = Biff8EncryptionKey.getCurrentUserPassword();
if (pass == null) {
@Override
public void setFont(Font font) {
if(font != null){
- long index = font.getIndex();
+ long index = font.getIndexAsInt();
this._cellXf.setFontId(index);
this._cellXf.setApplyFont(true);
} else {
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.poi.POIDataSamples;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.junit.Assume;
// the fix limits the available size and tries to read all entries
File f = samples.getFile("extenxls_pwd123.xlsx");
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(f, true)) {
+ try (POIFSFileSystem fs = new POIFSFileSystem(f, true)) {
EncryptionInfo info = new EncryptionInfo(fs);
Decryptor d = Decryptor.getInstance(info);
d.verifyPassword("pwd123");
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.TempFile;
final EncryptionInfo infoExpected;
final Decryptor decExpected;
- try (NPOIFSFileSystem nfs = new NPOIFSFileSystem(file, true)) {
+ try (POIFSFileSystem nfs = new POIFSFileSystem(file, true)) {
// Check the encryption details
infoExpected = new EncryptionInfo(nfs);
final EncryptionInfo infoActual2;
final byte[] payloadActual, encPackActual;
final long decPackLenActual;
- try (NPOIFSFileSystem nfs = new NPOIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()))) {
+ try (POIFSFileSystem nfs = new POIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()))) {
infoActual2 = new EncryptionInfo(nfs.getRoot());
Decryptor decActual = Decryptor.getInstance(infoActual2);
boolean passed = decActual.verifyPassword(pass);
final byte[] payloadExpected;
final EncryptionInfo infoExpected;
final Decryptor d;
- try (NPOIFSFileSystem nfs = new NPOIFSFileSystem(file, true)) {
+ try (POIFSFileSystem nfs = new POIFSFileSystem(file, true)) {
// Check the encryption details
infoExpected = new EncryptionInfo(nfs);
}
final byte[] payloadActual;
- try (NPOIFSFileSystem nfs = new NPOIFSFileSystem(new ByteArrayInputStream(encBytes))) {
+ try (POIFSFileSystem nfs = new POIFSFileSystem(new ByteArrayInputStream(encBytes))) {
final EncryptionInfo ei = new EncryptionInfo(nfs);
Decryptor d2 = Decryptor.getInstance(ei);
assertTrue("Unable to process: document is encrypted", d2.verifyPassword(pass));
Encryptor enc = info.getEncryptor();
enc.confirmPassword("password");
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
+ try (POIFSFileSystem fs = new POIFSFileSystem()) {
try (OutputStream os = enc.getDataStream(fs)) {
pkg.save(os);
}
- try (NPOIFSFileSystem inpFS = new NPOIFSFileSystem(new ByteArrayInputStream(encBytes))) {
+ try (POIFSFileSystem inpFS = new POIFSFileSystem(new ByteArrayInputStream(encBytes))) {
// Check we can decrypt it
EncryptionInfo info = new EncryptionInfo(inpFS);
Decryptor d = Decryptor.getInstance(info);
- assertEquals(true, d.verifyPassword("password"));
+ assertTrue(d.verifyPassword("password"));
try (OPCPackage inpPkg = OPCPackage.open(d.getDataStream(inpFS))) {
// Check it now has empty core properties
IOUtils.copy(fis, fos);
}
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(f, false)) {
+ try (POIFSFileSystem fs = new POIFSFileSystem(f, false)) {
// decrypt the protected file - in this case it was encrypted with the default password
EncryptionInfo encInfo = new EncryptionInfo(fs);
final byte[] epNewBytes;
final EncryptionInfo infoReload;
- try (NPOIFSFileSystem fsNew = new NPOIFSFileSystem()) {
+ try (POIFSFileSystem fsNew = new POIFSFileSystem()) {
try (OutputStream os = enc.getDataStream(fsNew)) {
os.write(zipInput);
}
ByteArrayOutputStream bos = new ByteArrayOutputStream();
fsNew.writeFilesystem(bos);
- try (NPOIFSFileSystem fsReload = new NPOIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()))) {
+ try (POIFSFileSystem fsReload = new POIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()))) {
infoReload = new EncryptionInfo(fsReload);
try (InputStream epReload = fsReload.getRoot().createDocumentInputStream("EncryptedPackage")) {
epNewBytes = IOUtils.toByteArray(epReload, 9400);
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.crypt.EncryptionMode;
import org.apache.poi.poifs.crypt.Encryptor;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.usermodel.BaseTestSlideShowFactory;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.TempFile;
}
private static File createProtected() throws IOException, GeneralSecurityException {
- return createProtected(filename, password);
- }
-
- private static File createProtected(String basefile, String password)
- throws IOException, GeneralSecurityException {
- NPOIFSFileSystem fs = new NPOIFSFileSystem();
- EncryptionInfo info = new EncryptionInfo(EncryptionMode.agile);
- Encryptor enc = info.getEncryptor();
- enc.confirmPassword(password);
- InputStream fis = _slTests.openResourceAsStream(basefile);
- OutputStream os = enc.getDataStream(fs);
- IOUtils.copy(fis, os);
- os.close();
- fis.close();
-
- File tf = TempFile.createTempFile("test-xslf-slidefactory", ".pptx");
- FileOutputStream fos = new FileOutputStream(tf);
- fs.writeFilesystem(fos);
- fos.close();
- fs.close();
-
- return tf;
+ try (POIFSFileSystem fs = new POIFSFileSystem()) {
+ EncryptionInfo info = new EncryptionInfo(EncryptionMode.agile);
+ Encryptor enc = info.getEncryptor();
+ enc.confirmPassword(password);
+ try (InputStream fis = _slTests.openResourceAsStream(filename);
+ OutputStream os = enc.getDataStream(fs)) {
+ IOUtils.copy(fis, os);
+ }
+
+ File tf = TempFile.createTempFile("test-xslf-slidefactory", ".pptx");
+ try (FileOutputStream fos = new FileOutputStream(tf)) {
+ fs.writeFilesystem(fos);
+ }
+ return tf;
+ }
}
}
import org.apache.poi.openxml4j.opc.PackageRelationship;
import org.apache.poi.openxml4j.opc.PackagingURIHelper;
import org.apache.poi.openxml4j.util.ZipSecureFile;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.ITestDataProvider;
import org.apache.poi.ss.SpreadsheetVersion;
// Add some more tables, and check
- t = s2.createTable();
+ t = s2.createTable(null);
t.setName("New 2");
t.setDisplayName("New 2");
- t = s3.createTable();
+ t = s3.createTable(null);
t.setName("New 3");
t.setDisplayName("New 3");
}
@Test
- public void bug55692_stream() throws IOException, InvalidFormatException {
+ public void bug55692_stream() throws IOException {
// Directly on a Stream, will go via NPOIFS and spot it's
// actually a .xlsx file encrypted with the default password, and open
Workbook wb = WorkbookFactory.create(
public void bug55692_npoifs() throws IOException {
// Via a NPOIFSFileSystem, will spot it's actually a .xlsx file
// encrypted with the default password, and open
- NPOIFSFileSystem fsNP = new NPOIFSFileSystem(
+ POIFSFileSystem fsNP = new POIFSFileSystem(
POIDataSamples.getPOIFSInstance().openResourceAsStream("protect.xlsx"));
Workbook wb = WorkbookFactory.create(fsNP);
assertNotNull(wb);
* error message if given one
*/
@Test
- public void bug56800_xlsb() throws IOException, InvalidFormatException {
+ public void bug56800_xlsb() throws IOException {
// Can be opened at the OPC level
OPCPackage pkg = XSSFTestDataSamples.openSamplePackage("Simple.xlsb");
}
private void runTest56574(boolean createRow) throws IOException {
- Workbook wb = XSSFTestDataSamples.openSampleWorkbook("56574.xlsx");
+ XSSFWorkbook wb = XSSFTestDataSamples.openSampleWorkbook("56574.xlsx");
Sheet sheet = wb.getSheet("Func");
assertNotNull(sheet);
}
}
- XSSFFormulaEvaluator.evaluateAllFormulaCells((XSSFWorkbook) wb);
+ XSSFFormulaEvaluator.evaluateAllFormulaCells(wb);
wb.getCreationHelper().createFormulaEvaluator().evaluateAll();
- CalculationChain chain = ((XSSFWorkbook) wb).getCalculationChain();
+ CalculationChain chain = wb.getCalculationChain();
checkCellsAreGone(chain);
- Workbook wbBack = XSSFTestDataSamples.writeOutAndReadBack(wb);
+ XSSFWorkbook wbBack = XSSFTestDataSamples.writeOutAndReadBack(wb);
Sheet sheetBack = wbBack.getSheet("Func");
assertNotNull(sheetBack);
- chain = ((XSSFWorkbook) wbBack).getCalculationChain();
+ chain = wbBack.getCalculationChain();
checkCellsAreGone(chain);
wbBack.close();
}
@Test
- public void test51626() throws IOException, InvalidFormatException {
+ public void test51626() throws IOException {
Workbook wb = XSSFTestDataSamples.openSampleWorkbook("51626.xlsx");
assertNotNull(wb);
wb.close();
final String initialFormula = "A1";
final String expectedFormula = "#REF!"; // from ms excel
- Workbook wb = new XSSFWorkbook();
+ XSSFWorkbook wb = new XSSFWorkbook();
Sheet sheet = wb.createSheet("sheet1");
sheet.createRow(0).createCell(0).setCellValue(1); // A1 = 1
{
FormulaShifter formulaShifter = FormulaShifter.createForRowCopy(0, "sheet1", 2/*firstRowToShift*/, 2/*lastRowToShift*/
, -1/*step*/, SpreadsheetVersion.EXCEL2007); // parameters 2, 2, -1 should mean : move row range [2-2] one level up
- XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create((XSSFWorkbook) wb);
+ XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
Ptg[] ptgs = FormulaParser.parse(initialFormula, fpb, FormulaType.CELL, 0); // [A1]
formulaShifter.adjustFormula(ptgs, 0); // adjusted to [A]
String shiftedFmla = FormulaRenderer.toFormulaString(fpb, ptgs); //A
{
FormulaShifter formulaShifter = FormulaShifter.createForRowShift(0, "sheet1", 2/*firstRowToShift*/, 2/*lastRowToShift*/
, -1/*step*/, SpreadsheetVersion.EXCEL2007); // parameters 2, 2, -1 should mean : move row range [2-2] one level up
- XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create((XSSFWorkbook) wb);
+ XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
Ptg[] ptgs = FormulaParser.parse(initialFormula, fpb, FormulaType.CELL, 0); // [A1]
formulaShifter.adjustFormula(ptgs, 0); // adjusted to [A]
String shiftedFmla = FormulaRenderer.toFormulaString(fpb, ptgs); //A
XSSFWorkbook wb = new XSSFWorkbook();
XSSFSheet sheet = wb.createSheet();
- XSSFTable table1 = sheet.createTable();
- XSSFTable table2 = sheet.createTable();
- XSSFTable table3 = sheet.createTable();
+ XSSFTable table1 = sheet.createTable(null);
+ XSSFTable table2 = sheet.createTable(null);
+ XSSFTable table3 = sheet.createTable(null);
sheet.removeTable(table1);
- sheet.createTable();
+ sheet.createTable(null);
sheet.removeTable(table2);
sheet.removeTable(table3);
- sheet.createTable();
+ sheet.createTable(null);
wb.close();
}
/**
* Auto column sizing failed when there were loads of fonts with
* errors like ArrayIndexOutOfBoundsException: -32765
- * TODO Get this to actually reproduce the bug...
*/
@Test
public void test62108() {
for (int i=0; i<fonts.length; i++) {
XSSFFont font = wb.createFont();
font.setFontHeight(i);
+ fonts[i] = font;
}
// Create a moderate number of columns, which use
sheet = wbBack.getSheetAt(0);
assertEquals("E11", sheet.getActiveCell().formatAsString());
wbBack.close();
-
- //wb.write(new FileOutputStream("c:/temp/61905." + instance.getStandardFileNameExtension()));
}
}
import org.apache.poi.poifs.crypt.Decryptor;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.crypt.HashAlgorithm;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.xwpf.extractor.XWPFWordExtractor;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
import org.apache.xmlbeans.XmlException;
@Test
public void bug53475NoCSPName() throws Exception {
File file = POIDataSamples.getDocumentInstance().getFile("bug53475-password-is-solrcell.docx");
- NPOIFSFileSystem filesystem = new NPOIFSFileSystem(file, true);
+ POIFSFileSystem filesystem = new POIFSFileSystem(file, true);
// Check the encryption details
EncryptionInfo info = new EncryptionInfo(filesystem);
Assume.assumeTrue("Please install JCE Unlimited Strength Jurisdiction Policy files for AES 256", maxKeyLen == 2147483647);
File file = POIDataSamples.getDocumentInstance().getFile("bug53475-password-is-pass.docx");
- NPOIFSFileSystem filesystem = new NPOIFSFileSystem(file, true);
+ POIFSFileSystem filesystem = new POIFSFileSystem(file, true);
// Check the encryption details
EncryptionInfo info = new EncryptionInfo(filesystem);
package org.apache.poi.hdgf;
-import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.poi.hdgf.streams.StringsStream;
import org.apache.poi.hdgf.streams.TrailerStream;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
public final class HDGFDiagram extends POIReadOnlyDocument {
private static final String VISIO_HEADER = "Visio (TM) Drawing\r\n";
- private byte[] _docstream;
-
- private short version;
private long docSize;
private Pointer trailerPointer;
private TrailerStream trailer;
- private ChunkFactory chunkFactory;
- private PointerFactory ptrFactory;
-
public HDGFDiagram(POIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
- public HDGFDiagram(NPOIFSFileSystem fs) throws IOException {
- this(fs.getRoot());
- }
+
public HDGFDiagram(DirectoryNode dir) throws IOException {
super(dir);
// Grab the document stream
- InputStream is = dir.createDocumentInputStream("VisioDocument");
- _docstream = IOUtils.toByteArray(is);
- is.close();
+ final byte[] _docstream;
+ try (InputStream is = dir.createDocumentInputStream("VisioDocument")) {
+ _docstream = IOUtils.toByteArray(is);
+ }
// Check it's really visio
String typeString = new String(_docstream, 0, 20, LocaleUtil.CHARSET_1252 );
}
// Grab the version number, 0x1a -> 0x1b
- version = LittleEndian.getShort(_docstream, 0x1a);
+ short version = LittleEndian.getShort(_docstream, 0x1a);
// Grab the document size, 0x1c -> 0x1f
docSize = LittleEndian.getUInt(_docstream, 0x1c);
// ??? 0x20 -> 0x23
// Create the Chunk+Pointer Factories for the document version
- ptrFactory = new PointerFactory(version);
- chunkFactory = new ChunkFactory(version);
+ PointerFactory ptrFactory = new PointerFactory(version);
+ ChunkFactory chunkFactory = new ChunkFactory(version);
// Grab the pointer to the trailer
trailerPointer = ptrFactory.createPointer(_docstream, 0x24);
import org.apache.poi.hdgf.streams.ChunkStream;
import org.apache.poi.hdgf.streams.PointerContainingStream;
import org.apache.poi.hdgf.streams.Stream;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
* Developer helper class to dump out the pointer+stream structure
System.exit(1);
}
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(new File(args[0]));
+ POIFSFileSystem poifs = new POIFSFileSystem(new File(args[0]));
try {
HDGFDiagram hdgf = new HDGFDiagram(poifs);
import org.apache.poi.hdgf.streams.PointerContainingStream;
import org.apache.poi.hdgf.streams.Stream;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
public VisioTextExtractor(POIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
- public VisioTextExtractor(NPOIFSFileSystem fs) throws IOException {
- this(fs.getRoot());
- }
+
public VisioTextExtractor(DirectoryNode dir) throws IOException {
this(new HDGFDiagram(dir));
}
+
public VisioTextExtractor(InputStream inp) throws IOException {
- this(new NPOIFSFileSystem(inp));
+ this(new POIFSFileSystem(inp));
}
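As a usage note (not part of the patch), a minimal sketch of the surviving VisioTextExtractor entry points after the NPOIFS/POIFS merge; the sample.vsd path is an assumption.

import java.io.File;
import org.apache.poi.hdgf.extractor.VisioTextExtractor;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class ExtractVisioText {
    public static void main(String[] args) throws Exception {
        // open the OLE2 container read-only, then hand it to the extractor
        try (POIFSFileSystem fs = new POIFSFileSystem(new File("sample.vsd"), true);
             VisioTextExtractor extractor = new VisioTextExtractor(fs)) {
            // getText() joins every text chunk found in the drawing
            System.out.println(extractor.getText());
        }
    }
}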
/**
for(Stream stream : hdgf.getTopLevelStreams()) {
findText(stream, text);
}
- return text.toArray( new String[text.size()] );
+ return text.toArray(new String[0]);
}
private void findText(Stream stream, List<String> text) {
if(stream instanceof PointerContainingStream) {
*/
@Override
public String getText() {
- StringBuffer text = new StringBuffer();
+ StringBuilder text = new StringBuilder();
for(String t : getAllText()) {
text.append(t);
if(!t.endsWith("\r") && !t.endsWith("\n")) {
import org.apache.poi.hpbf.model.MainContents;
import org.apache.poi.hpbf.model.QuillContents;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
public HPBFDocument(POIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
- public HPBFDocument(NPOIFSFileSystem fs) throws IOException {
- this(fs.getRoot());
- }
+
public HPBFDocument(InputStream inp) throws IOException {
- this(new NPOIFSFileSystem(inp));
+ this(new POIFSFileSystem(inp));
}
/**
import org.apache.poi.ddf.DefaultEscherRecordFactory;
import org.apache.poi.ddf.EscherRecord;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LocaleUtil;
* constructed.
*/
public final class HPBFDumper {
- private NPOIFSFileSystem fs;
- public HPBFDumper(NPOIFSFileSystem fs) {
+ private POIFSFileSystem fs;
+ public HPBFDumper(POIFSFileSystem fs) {
this.fs = fs;
}
@SuppressWarnings("resource")
public HPBFDumper(InputStream inp) throws IOException {
- this(new NPOIFSFileSystem(inp));
+ this(new POIFSFileSystem(inp));
}
private static byte[] getData(DirectoryNode dir, String name) throws IOException {
System.err.println(" HPBFDumper <filename>");
System.exit(1);
}
- HPBFDumper dump = new HPBFDumper(new NPOIFSFileSystem(new File(args[0])));
+ HPBFDumper dump = new HPBFDumper(new POIFSFileSystem(new File(args[0])));
System.out.println("Dumping " + args[0]);
dump.dumpContents();
import org.apache.poi.hpbf.HPBFDocument;
import org.apache.poi.hpbf.model.QuillContents;
import org.apache.poi.hpbf.model.qcbits.QCBit;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.HexDump;
/**
doc = hpbfDoc;
qc = doc.getQuillContents();
}
- public PLCDumper(NPOIFSFileSystem fs) throws IOException {
+ public PLCDumper(POIFSFileSystem fs) throws IOException {
this(new HPBFDocument(fs));
}
public PLCDumper(InputStream inp) throws IOException {
- this(new NPOIFSFileSystem(inp));
+ this(new POIFSFileSystem(inp));
}
public static void main(String[] args) throws Exception {
import org.apache.poi.hpbf.model.qcbits.QCTextBit;
import org.apache.poi.hpbf.model.qcbits.QCPLCBit.Type12;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
public PublisherTextExtractor(POIFSFileSystem fs) throws IOException {
this(new HPBFDocument(fs));
}
- public PublisherTextExtractor(NPOIFSFileSystem fs) throws IOException {
- this(new HPBFDocument(fs));
- }
public PublisherTextExtractor(InputStream is) throws IOException {
this(new POIFSFileSystem(is));
}
// Get the text from the Quill Contents
QCBit[] bits = doc.getQuillContents().getBits();
- for(int i=0; i<bits.length; i++) {
- if(bits[i] != null && bits[i] instanceof QCTextBit) {
- QCTextBit t = (QCTextBit)bits[i];
- text.append( t.getText().replace('\r', '\n') );
+ for (QCBit bit1 : bits) {
+ if (bit1 instanceof QCTextBit) {
+ QCTextBit t = (QCTextBit) bit1;
+ text.append(t.getText().replace('\r', '\n'));
}
}
// hyperlink is in, and we have yet to figure out
// how to tie that together.
if(hyperlinksByDefault) {
- for(int i=0; i<bits.length; i++) {
- if(bits[i] != null && bits[i] instanceof Type12) {
- Type12 hyperlinks = (Type12)bits[i];
- for(int j=0; j<hyperlinks.getNumberOfHyperlinks(); j++) {
+ for (QCBit bit : bits) {
+ if (bit instanceof Type12) {
+ Type12 hyperlinks = (Type12) bit;
+ for (int j = 0; j < hyperlinks.getNumberOfHyperlinks(); j++) {
text.append("<");
text.append(hyperlinks.getHyperlink(j));
text.append(">\n");
System.err.println(" PublisherTextExtractor <file.pub>");
}
- for(int i=0; i<args.length; i++) {
- FileInputStream fis = new FileInputStream(args[i]);
- try {
- PublisherTextExtractor te = new PublisherTextExtractor(fis);
- System.out.println(te.getText());
- te.close();
- } finally {
- fis.close();
- }
+ for (String arg : args) {
+ try (FileInputStream fis = new FileInputStream(arg);
+ PublisherTextExtractor te = new PublisherTextExtractor(fis)) {
+ System.out.println(te.getText());
+ }
}
}
}
import org.apache.poi.hslf.record.RecordTypes;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
private boolean hexHeader = true;
public PPTXMLDump(File ppt) throws IOException {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(ppt, true);
+ POIFSFileSystem fs = new POIFSFileSystem(ppt, true);
try {
docstream = readEntry(fs, HSLFSlideShow.POWERPOINT_DOCUMENT);
pictstream = readEntry(fs, PICTURES_ENTRY);
}
}
- private static byte[] readEntry(NPOIFSFileSystem fs, String entry)
+ private static byte[] readEntry(POIFSFileSystem fs, String entry)
throws IOException {
DirectoryNode dn = fs.getRoot();
if (!dn.hasEntry(entry)) {
return;
}
boolean outFile = false;
- for (int i = 0; i < args.length; i++){
+ for (String arg : args) {
- if (args[i].startsWith("-")) {
- if ("-f".equals(args[i])){
+ if (arg.startsWith("-")) {
+ if ("-f".equals(arg)) {
- //write ouput to a file
+ //write output to a file
outFile = true;
}
} else {
- File ppt = new File(args[i]);
+ File ppt = new File(arg);
PPTXMLDump dump = new PPTXMLDump(ppt);
- System.out.println("Dumping " + args[i]);
+ System.out.println("Dumping " + arg);
- if (outFile){
+ if (outFile) {
FileOutputStream fos = new FileOutputStream(ppt.getName() + ".xml");
OutputStreamWriter out = new OutputStreamWriter(fos, StandardCharsets.UTF_8);
dump.dump(out);
import org.apache.poi.hslf.record.HSLFEscherRecordFactory;
import org.apache.poi.hslf.record.RecordTypes;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
filename = args[1];
}
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(new File(filename));
+ POIFSFileSystem poifs = new POIFSFileSystem(new File(filename));
SlideShowDumper foo = new SlideShowDumper(poifs, System.out);
poifs.close();
* @param filesystem the POIFS FileSystem to read from
* @throws IOException if there is a problem while parsing the document.
*/
- public SlideShowDumper(NPOIFSFileSystem filesystem, PrintStream out) throws IOException {
+ public SlideShowDumper(POIFSFileSystem filesystem, PrintStream out) throws IOException {
// Grab the document stream
InputStream is = filesystem.createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT);
docstream = IOUtils.toByteArray(is);
import org.apache.poi.hslf.usermodel.HSLFTextParagraph;
import org.apache.poi.hssf.record.crypto.Biff8EncryptionKey;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.sl.usermodel.SlideShowFactory;
this((HSLFSlideShow)SlideShowFactory.create(fs, Biff8EncryptionKey.getCurrentUserPassword()));
}
- /**
- * Creates a PowerPointExtractor, from an open NPOIFSFileSystem
- *
- * @param fs the NPOIFSFileSystem containing the PowerPoint document
- */
- public PowerPointExtractor(NPOIFSFileSystem fs) throws IOException {
- this((HSLFSlideShow)SlideShowFactory.create(fs, Biff8EncryptionKey.getCurrentUserPassword()));
- }
-
/**
* Creates a PowerPointExtractor, from a specific place
- * inside an open NPOIFSFileSystem
+ * inside an open POIFSFileSystem
import org.apache.poi.hslf.record.TextCharsAtom;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;
import org.apache.poi.hslf.usermodel.HSLFTextParagraph;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
* lucene indexers) that would ever want to use this!
*/
public final class QuickButCruddyTextExtractor {
- private NPOIFSFileSystem fs;
+ private POIFSFileSystem fs;
private InputStream is;
private byte[] pptContents;
*/
@SuppressWarnings("resource")
public QuickButCruddyTextExtractor(String fileName) throws IOException {
- this(new NPOIFSFileSystem(new File(fileName)));
+ this(new POIFSFileSystem(new File(fileName)));
}
/**
*/
@SuppressWarnings("resource")
public QuickButCruddyTextExtractor(InputStream iStream) throws IOException {
- this(new NPOIFSFileSystem(iStream));
+ this(new POIFSFileSystem(iStream));
is = iStream;
}
* Creates an extractor from a POIFS Filesystem
* @param poifs
*/
- public QuickButCruddyTextExtractor(NPOIFSFileSystem poifs) throws IOException {
+ public QuickButCruddyTextExtractor(POIFSFileSystem poifs) throws IOException {
fs = poifs;
// Find the PowerPoint bit, and get out the bytes
import org.apache.poi.hslf.exceptions.OldPowerPointFormatException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentEntry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.POILogFactory;
private static final int MAX_RECORD_LENGTH = 1_000_000;
/** Standard Atom header */
- public static final byte[] atomHeader = new byte[] { 0, 0, -10, 15 };
+ private static final byte[] atomHeader = new byte[] { 0, 0, -10, 15 };
/** The PowerPoint magic number for a non-encrypted file */
- public static final byte[] headerToken = new byte[] { 95, -64, -111, -29 };
- /** The PowerPoint magic number for an encrypted file */
- public static final byte[] encHeaderToken = new byte[] { -33, -60, -47, -13 };
- /** The Powerpoint 97 version, major and minor numbers */
- public static final byte[] ppt97FileVer = new byte[] { 8, 00, -13, 03, 03, 00 };
+ private static final byte[] headerToken = new byte[] { 95, -64, -111, -29 };
+ /** The PowerPoint magic number for an encrypted file */
+ private static final byte[] encHeaderToken = new byte[] { -33, -60, -47, -13 };
+ // The Powerpoint 97 version, major and minor numbers
+ // byte[] ppt97FileVer = new byte[] { 8, 00, -13, 03, 03, 00 };
/** The version, major and minor numbers */
private int docFinalVersion;
/**
* Writes ourselves back out to a filesystem
*/
- public void writeToFS(NPOIFSFileSystem fs) throws IOException {
+ public void writeToFS(POIFSFileSystem fs) throws IOException {
// Grab contents
ByteArrayOutputStream baos = new ByteArrayOutputStream();
writeOut(baos);
import org.apache.poi.hslf.record.*;
import org.apache.poi.hslf.record.SlideListWithText.SlideAtomsSet;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
-import org.apache.poi.poifs.filesystem.Ole10Native;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
+import org.apache.poi.poifs.filesystem.Ole10Native;
import org.apache.poi.sl.usermodel.MasterSheet;
import org.apache.poi.sl.usermodel.PictureData.PictureType;
import org.apache.poi.sl.usermodel.Resources;
- * Constructs a Powerpoint document from an POIFSFileSystem.
+ * Constructs a Powerpoint document from a POIFSFileSystem.
*/
@SuppressWarnings("resource")
- public HSLFSlideShow(NPOIFSFileSystem npoifs) throws IOException {
+ public HSLFSlideShow(POIFSFileSystem npoifs) throws IOException {
this(new HSLFSlideShowImpl(npoifs));
}
/**
* @return the current loading/saving phase
*/
- protected static LoadSavePhase getLoadSavePhase() {
+ static LoadSavePhase getLoadSavePhase() {
return loadSavePhase.get();
}
// PersistPtr, remove their old positions
int[] ids = pph.getKnownSlideIDs();
for (int id : ids) {
- if (mostRecentByBytes.containsKey(id)) {
- mostRecentByBytes.remove(id);
- }
+ mostRecentByBytes.remove(id);
}
// Now, update the byte level locations with their latest values
// We'll also want to be able to turn the slide IDs into a position
// in this array
_sheetIdToCoreRecordsLookup = new HashMap<>();
- Integer[] allIDs = mostRecentByBytes.keySet().toArray(new Integer[mostRecentByBytes.size()]);
+ Integer[] allIDs = mostRecentByBytes.keySet().toArray(new Integer[0]);
Arrays.sort(allIDs);
for (int i = 0; i < allIDs.length; i++) {
_sheetIdToCoreRecordsLookup.put(allIDs[i], i);
/**
* Returns the data of all the embedded OLE object in the SlideShow
*/
+ @SuppressWarnings("WeakerAccess")
public HSLFObjectData[] getEmbeddedObjects() {
return _hslfSlideShow.getEmbeddedObjects();
}
/**
* Helper method for usermodel: Get the font collection
*/
- protected FontCollection getFontCollection() {
+ FontCollection getFontCollection() {
return _fonts;
}
* @param newSlideNumber
* The new slide number (1 based)
*/
+ @SuppressWarnings("WeakerAccess")
public void reorderSlide(int oldSlideNumber, int newSlideNumber) {
// Ensure these numbers are valid
if (oldSlideNumber < 1 || newSlideNumber < 1) {
lst.addAll(Arrays.asList(s.getSlideRecords()));
}
- Record[] r = lst.toArray(new Record[lst.size()]);
+ Record[] r = lst.toArray(new Record[0]);
slwt.setChildRecord(r);
}
* the index of the slide to remove (0-based)
* @return the slide that was removed from the slide show.
*/
+ @SuppressWarnings("WeakerAccess")
public HSLFSlide removeSlide(int index) {
int lastSlideIdx = _slides.size() - 1;
if (index < 0 || index > lastSlideIdx) {
if (sa.isEmpty()) {
_documentRecord.removeSlideListWithText(slwt);
} else {
- slwt.setSlideAtomsSets(sa.toArray(new SlideAtomsSet[sa.size()]));
- slwt.setChildRecord(records.toArray(new Record[records.size()]));
+ slwt.setSlideAtomsSets(sa.toArray(new SlideAtomsSet[0]));
+ slwt.setChildRecord(records.toArray(new Record[0]));
}
// if the removed slide had notes - remove references to them too
SlideListWithText nslwt = _documentRecord.getNotesSlideListWithText();
records = new ArrayList<>();
ArrayList<SlideAtomsSet> na = new ArrayList<>();
- for (SlideAtomsSet ns : nslwt.getSlideAtomsSets()) {
- if (ns.getSlidePersistAtom().getSlideIdentifier() == notesId) {
- continue;
- }
- na.add(ns);
- records.add(ns.getSlidePersistAtom());
- if (ns.getSlideRecords() != null) {
- records.addAll(Arrays.asList(ns.getSlideRecords()));
+ if (nslwt != null) {
+ for (SlideAtomsSet ns : nslwt.getSlideAtomsSets()) {
+ if (ns.getSlidePersistAtom().getSlideIdentifier() == notesId) {
+ continue;
+ }
+ na.add(ns);
+ records.add(ns.getSlidePersistAtom());
+ if (ns.getSlideRecords() != null) {
+ records.addAll(Arrays.asList(ns.getSlideRecords()));
+ }
}
}
if (na.isEmpty()) {
_documentRecord.removeSlideListWithText(nslwt);
} else {
- nslwt.setSlideAtomsSets(na.toArray(new SlideAtomsSet[na.size()]));
- nslwt.setChildRecord(records.toArray(new Record[records.size()]));
+ nslwt.setSlideAtomsSets(na.toArray(new SlideAtomsSet[0]));
+ nslwt.setChildRecord(records.toArray(new Record[0]));
}
}
SlidePersistAtom prev = null;
for (SlideAtomsSet sas : slist.getSlideAtomsSets()) {
SlidePersistAtom spa = sas.getSlidePersistAtom();
- if (spa.getSlideIdentifier() < 0) {
- // This is for a master slide
- // Odd, since we only deal with the Slide SLWT
- } else {
+ if (spa.getSlideIdentifier() >= 0) {
// Must be for a real slide
if (prev == null) {
prev = spa;
throw new IllegalArgumentException("Unsupported picture format: " + format);
}
byte[] data = IOUtils.safelyAllocate(pict.length(), MAX_RECORD_LENGTH);
- FileInputStream is = new FileInputStream(pict);
- try {
+ try (FileInputStream is = new FileInputStream(pict)) {
IOUtils.readFully(is, data);
- } finally {
- is.close();
- }
+ }
return addPicture(data, format);
}
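For context (not part of the patch), a minimal sketch of how addPicture(File, PictureType) is typically paired with a picture shape; the file names and anchor rectangle are assumptions.

import java.awt.Rectangle;
import java.io.File;
import java.io.FileOutputStream;
import org.apache.poi.hslf.usermodel.HSLFPictureData;
import org.apache.poi.hslf.usermodel.HSLFPictureShape;
import org.apache.poi.hslf.usermodel.HSLFSlide;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;
import org.apache.poi.sl.usermodel.PictureData.PictureType;

public class AddPictureDemo {
    public static void main(String[] args) throws Exception {
        try (HSLFSlideShow ppt = new HSLFSlideShow();
             FileOutputStream out = new FileOutputStream("picture-demo.ppt")) {
            // register the picture data once, then reference it from a shape
            HSLFPictureData data = ppt.addPicture(new File("logo.png"), PictureType.PNG);
            HSLFSlide slide = ppt.createSlide();
            HSLFPictureShape shape = new HSLFPictureShape(data);
            shape.setAnchor(new Rectangle(50, 50, 300, 200)); // position in points
            slide.addShape(shape);
            ppt.write(out);
        }
    }
}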
* "ShockwaveFlash.ShockwaveFlash.9"
* @return 0-based index of the control
*/
+ @SuppressWarnings("unused")
public int addControl(String name, String progId) {
ExControl ctrl = new ExControl();
ctrl.setProgId(progId);
return new HPSFPropertiesExtractor(getSlideShowImpl());
}
- protected int addToObjListAtom(RecordContainer exObj) {
+ int addToObjListAtom(RecordContainer exObj) {
ExObjList lst = getDocumentRecord().getExObjList(true);
ExObjListAtom objAtom = lst.getExObjListAtom();
// increment the object ID seed
return objectId;
}
- protected static Map<String,ClassID> getOleMap() {
+ private static Map<String,ClassID> getOleMap() {
Map<String,ClassID> olemap = new HashMap<>();
olemap.put(POWERPOINT_DOCUMENT, ClassIDPredefined.POWERPOINT_V8.getClassID());
// as per BIFF8 spec
return olemap;
}
- protected int addPersistentObject(PositionDependentRecord slideRecord) {
+ private int addPersistentObject(PositionDependentRecord slideRecord) {
slideRecord.setLastOnDiskOffset(HSLFSlideShowImpl.UNSET_OFFSET);
_hslfSlideShow.appendRootLevelRecord((Record)slideRecord);
@Override
public MasterSheet<HSLFShape,HSLFTextParagraph> createMasterSheet() throws IOException {
- // TODO Auto-generated method stub
+ // TODO implement or throw exception if not supported
return null;
}
@Override
public Resources getResources() {
- // TODO Auto-generated method stub
+ // TODO implement or throw exception if not supported
return null;
}
import java.io.IOException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.usermodel.SlideShowFactory;
import org.apache.poi.util.Internal;
* Helper class which is instantiated by reflection from
* {@link SlideShowFactory#create(java.io.File)} and similar
*/
+@SuppressWarnings("unused")
@Internal
public class HSLFSlideShowFactory extends SlideShowFactory {
/**
* Note that in order to properly release resources the
* SlideShow should be closed after use.
*/
- public static HSLFSlideShow createSlideShow(final NPOIFSFileSystem fs) throws IOException {
+ public static HSLFSlideShow createSlideShow(final POIFSFileSystem fs) throws IOException {
return new HSLFSlideShow(fs);
}
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.EntryUtils;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.usermodel.PictureData.PictureType;
import org.apache.poi.util.IOUtils;
* "reader". It is only a very basic class for now
*/
public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
- public static final int UNSET_OFFSET = -1;
+ static final int UNSET_OFFSET = -1;
//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 200_000_000;
this(filesystem.getRoot());
}
- /**
- * Constructs a Powerpoint document from a POIFS Filesystem. Parses the
- * document and places all the important stuff into data structures.
- *
- * @param filesystem the POIFS FileSystem to read from
- * @throws IOException if there is a problem while parsing the document.
- */
- public HSLFSlideShowImpl(NPOIFSFileSystem filesystem) throws IOException {
- this(filesystem.getRoot());
- }
-
/**
* Constructs a Powerpoint document from a specific point in a
* POIFS Filesystem. Parses the document and places all the
* Extracts the main PowerPoint document stream from the
* POI file, ready to be passed
*
- * @throws IOException
+ * @throws IOException when the PowerPoint document cannot be read
*/
private void readPowerPointStream() throws IOException {
// Get the main document stream
// Grab the document stream
int len = docProps.getSize();
- InputStream is = getDirectory().createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT);
- try {
+ try (InputStream is = getDirectory().createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT)) {
_docstream = IOUtils.toByteArray(is, len);
- } finally {
- is.close();
}
}
}
decryptData.close();
- return records.values().toArray(new Record[records.size()]);
+ return records.values().toArray(new Record[0]);
}
private void initRecordOffsets(byte[] docstream, int usrOffset, NavigableMap<Integer, Record> recordMap, Map<Integer, Integer> offset2id) {
byte[] pictstream = IOUtils.toByteArray(is, entry.getSize());
is.close();
- HSLFSlideShowEncrypted decryptData = new HSLFSlideShowEncrypted(getDocumentEncryptionAtom());
- try {
-
+ try (HSLFSlideShowEncrypted decryptData = new HSLFSlideShowEncrypted(getDocumentEncryptionAtom())) {
+
int pos = 0;
// An empty picture record (length 0) will take up 8 bytes
while (pos <= (pictstream.length - 8)) {
int offset = pos;
-
+
decryptData.decryptPicture(pictstream, offset);
-
+
// Image signature
int signature = LittleEndian.getUShort(pictstream, pos);
pos += LittleEndianConsts.SHORT_SIZE;
// Image size (excluding the 8 byte header)
int imgsize = LittleEndian.getInt(pictstream, pos);
pos += LittleEndianConsts.INT_SIZE;
-
+
// When parsing the BStoreDelay stream, [MS-ODRAW] says that we
// should terminate if the type isn't 0xf007 or 0xf018->0xf117
if (!((type == 0xf007) || (type >= 0xf018 && type <= 0xf117))) {
break;
}
-
+
// The image size must be 0 or greater
// (0 is allowed, but odd, since we do wind on by the header each
// time, so we won't get stuck)
if (imgsize < 0) {
throw new CorruptPowerPointFileException("The file contains a picture, at position " + _pictures.size() + ", which has a negatively sized data length, so we can't trust any of the picture data");
}
-
+
// If they type (including the bonus 0xF018) is 0, skip it
PictureType pt = PictureType.forNativeID(type - 0xF018);
if (pt == null) {
//The pictstream can be truncated halfway through a picture.
//This is not a problem if the pictstream contains extra pictures
//that are not used in any slide -- BUG-60305
- if (pos+imgsize > pictstream.length) {
+ if (pos + imgsize > pictstream.length) {
logger.log(POILogger.WARN, "\"Pictures\" stream may have ended early. In some circumstances, this is not a problem; " +
"in others, this could indicate a corrupt file");
break;
try {
HSLFPictureData pict = HSLFPictureData.create(pt);
pict.setSignature(signature);
-
+
// Copy the data, ready to pass to PictureData
byte[] imgdata = IOUtils.safelyAllocate(imgsize, MAX_RECORD_LENGTH);
System.arraycopy(pictstream, pos, imgdata, 0, imgdata.length);
pict.setRawData(imgdata);
-
+
pict.setOffset(offset);
pict.setIndex(_pictures.size());
_pictures.add(pict);
logger.log(POILogger.ERROR, "Problem reading picture: " + e + "\nYou document will probably become corrupted if you save it!");
}
}
-
+
pos += imgsize;
}
- } finally {
- decryptData.close();
}
}
* @param interestingRecords a map of interesting records (PersistPtrHolder and UserEditAtom)
* referenced by their RecordType. Only the very last of each type will be saved to the map.
* May be null, if not needed.
- * @throws IOException
*/
+ @SuppressWarnings("WeakerAccess")
public void updateAndWriteDependantRecords(OutputStream os, Map<RecordTypes, PositionDependentRecord> interestingRecords)
throws IOException {
// For position dependent records, hold where they were and now are
*/
public void write(File newFile, boolean preserveNodes) throws IOException {
// Get a new FileSystem to write into
- POIFSFileSystem outFS = POIFSFileSystem.create(newFile);
- try {
+ try (POIFSFileSystem outFS = POIFSFileSystem.create(newFile)) {
// Write into the new FileSystem
write(outFS, preserveNodes);
// Send the POIFSFileSystem object out to the underlying stream
outFS.writeFilesystem();
- } finally {
- outFS.close();
}
}
*/
public void write(OutputStream out, boolean preserveNodes) throws IOException {
// Get a new FileSystem to write into
- POIFSFileSystem outFS = new POIFSFileSystem();
- try {
+ try (POIFSFileSystem outFS = new POIFSFileSystem()) {
// Write into the new FileSystem
write(outFS, preserveNodes);
// Send the POIFSFileSystem object out to the underlying stream
outFS.writeFilesystem(out);
- } finally {
- outFS.close();
}
}
- private void write(NPOIFSFileSystem outFS, boolean copyAllOtherNodes) throws IOException {
+ private void write(POIFSFileSystem outFS, boolean copyAllOtherNodes) throws IOException {
// read properties and pictures, with old encryption settings where appropriate
if (_pictures == null) {
readPictures();
@Override
- public EncryptionInfo getEncryptionInfo() throws IOException {
+ public EncryptionInfo getEncryptionInfo() {
DocumentEncryptionAtom dea = getDocumentEncryptionAtom();
return (dea != null) ? dea.getEncryptionInfo() : null;
}
* Adds a new root level record, at the end, but before the last
* PersistPtrIncrementalBlock.
*/
+ @SuppressWarnings({"UnusedReturnValue", "WeakerAccess"})
public synchronized int appendRootLevelRecord(Record newRecord) {
int addedAt = -1;
Record[] r = new Record[_records.length + 1];
objects.add(new HSLFObjectData((ExOleObjStg) r));
}
}
- _objects = objects.toArray(new HSLFObjectData[objects.size()]);
+ _objects = objects.toArray(new HSLFObjectData[0]);
}
return _objects;
}
// only close the filesystem, if we are based on the root node.
// embedded documents/slideshows shouldn't close the parent container
if (getDirectory().getParent() == null) {
- NPOIFSFileSystem fs = getDirectory().getFileSystem();
+ POIFSFileSystem fs = getDirectory().getFileSystem();
if (fs != null) {
fs.close();
}
import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
import org.apache.poi.hsmf.parsers.POIFSChunkParser;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.CodePageUtil;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
*/
public MAPIMessage() {
// TODO - make writing possible
- super(new NPOIFSFileSystem());
+ super(new POIFSFileSystem());
}
* @exception IOException on errors reading, or invalid data
*/
public MAPIMessage(File file) throws IOException {
- this(new NPOIFSFileSystem(file));
+ this(new POIFSFileSystem(file));
}
/**
* @exception IOException on errors reading, or invalid data
*/
public MAPIMessage(InputStream in) throws IOException {
- this(new NPOIFSFileSystem(in));
+ this(new POIFSFileSystem(in));
}
/**
* Constructor for reading MSG Files from a POIFS filesystem
* @param fs Open POIFS FileSystem containing the message
* @exception IOException on errors reading, or invalid data
*/
- public MAPIMessage(NPOIFSFileSystem fs) throws IOException {
+ public MAPIMessage(POIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
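As a usage note (not part of the patch), a minimal sketch of opening a .msg through the POIFSFileSystem constructor above; the file name is an assumption.

import java.io.File;
import org.apache.poi.hsmf.MAPIMessage;
import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class ReadMsg {
    public static void main(String[] args) throws Exception {
        try (POIFSFileSystem fs = new POIFSFileSystem(new File("example.msg"), true);
             MAPIMessage msg = new MAPIMessage(fs)) {
            try {
                System.out.println("Subject: " + msg.getSubject());
                System.out.println(msg.getTextBody());
            } catch (ChunkNotFoundException e) {
                // not every message carries every chunk
                System.out.println("Message is missing the expected chunks");
            }
        }
    }
}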
/**
import org.apache.poi.hsmf.datatypes.PropertiesChunk;
import org.apache.poi.hsmf.datatypes.PropertyValue;
import org.apache.poi.hsmf.parsers.POIFSChunkParser;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
/**
* Dumps out the chunk details, and where possible contents
*/
public class HSMFDump {
- private NPOIFSFileSystem fs;
- public HSMFDump(NPOIFSFileSystem fs) {
+ private POIFSFileSystem fs;
+ public HSMFDump(POIFSFileSystem fs) {
this.fs = fs;
}
public static void main(String[] args) throws Exception {
for(String file : args) {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(file), true);
+ POIFSFileSystem fs = new POIFSFileSystem(new File(file), true);
HSMFDump dump = new HSMFDump(fs);
dump.dump();
fs.close();
import org.apache.poi.hsmf.datatypes.StringChunk;
import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.apache.poi.util.StringUtil.StringsIterator;
public OutlookTextExtactor(DirectoryNode poifsDir) throws IOException {
this(new MAPIMessage(poifsDir));
}
- public OutlookTextExtactor(NPOIFSFileSystem fs) throws IOException {
+ public OutlookTextExtactor(POIFSFileSystem fs) throws IOException {
this(new MAPIMessage(fs));
}
public OutlookTextExtactor(InputStream inp) throws IOException {
public static void main(String[] args) throws Exception {
for(String filename : args) {
- NPOIFSFileSystem poifs = null;
- OutlookTextExtactor extractor = null;
- try {
- poifs = new NPOIFSFileSystem(new File(filename));
- extractor = new OutlookTextExtactor(poifs);
- System.out.println( extractor.getText() );
- } finally {
- if (extractor != null) extractor.close();
- if (poifs != null) poifs.close();
+ try (POIFSFileSystem poifs = new POIFSFileSystem(new File(filename));
+ OutlookTextExtactor extractor = new OutlookTextExtactor(poifs)) {
+ System.out.println(extractor.getText());
}
}
}
// First try via the proper chunk
SimpleDateFormat f = new SimpleDateFormat("E, d MMM yyyy HH:mm:ss Z", Locale.ROOT);
f.setTimeZone(LocaleUtil.getUserTimeZone());
- s.append("Date: " + f.format(msg.getMessageDate().getTime()) + "\n");
+ s.append("Date: ").append(f.format(msg.getMessageDate().getTime())).append("\n");
} catch(ChunkNotFoundException e) {
try {
// Failing that try via the raw headers
String[] headers = msg.getHeaders();
for(String header: headers) {
if(startsWithIgnoreCase(header, "date:")) {
- s.append(
- "Date:" +
- header.substring(header.indexOf(':')+1) +
- "\n"
- );
+ s.append("Date:").append(header, header.indexOf(':')+1, header.length()).append("\n");
break;
}
}
}
try {
- s.append("Subject: " + msg.getSubject() + "\n");
+ s.append("Subject: ").append(msg.getSubject()).append("\n");
} catch(ChunkNotFoundException e) {}
// Display attachment names
att.getAttachMimeTag().getValue() != null) {
attName = att.getAttachMimeTag().getValue() + " = " + attName;
}
- s.append("Attachment: " + attName + "\n");
+ s.append("Attachment: ").append(attName).append("\n");
}
try {
- s.append("\n" + msg.getTextBody() + "\n");
+ s.append("\n").append(msg.getTextBody()).append("\n");
} catch(ChunkNotFoundException e) {}
return s.toString();
String[] names = displayText.split(";\\s*");
boolean first = true;
- s.append(type + ": ");
+ s.append(type).append(": ");
for(String name : names) {
if(first) {
first = false;
// Append the email address in <>, assuming
// the name wasn't already the email address
if(! email.equals(name)) {
- s.append( " <" + email + ">");
+ s.append(" <").append(email).append(">");
}
}
}
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
public final class POIFSChunkParser {
private final static POILogger logger = POILogFactory.getLogger(POIFSChunkParser.class);
- public static ChunkGroup[] parse(NPOIFSFileSystem fs) throws IOException {
+ public static ChunkGroup[] parse(POIFSFileSystem fs) throws IOException {
return parse(fs.getRoot());
}
public static ChunkGroup[] parse(DirectoryNode node) throws IOException {
if(chunk != null) {
if(entry instanceof DocumentNode) {
- DocumentInputStream inp = null;
- try {
- inp = new DocumentInputStream((DocumentNode)entry);
+ try (DocumentInputStream inp = new DocumentInputStream((DocumentNode) entry)) {
chunk.readValue(inp);
grouping.record(chunk);
- } catch(IOException e) {
- logger.log(POILogger.ERROR, "Error reading from part " + entry.getName() + " - " + e);
- } finally {
- if (inp != null) inp.close();
+ } catch (IOException e) {
+ logger.log(POILogger.ERROR, "Error reading from part " + entry.getName() + " - " + e);
}
} else {
grouping.record(chunk);
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.EntryUtils;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Internal;
private static final String STREAM_DATA = "Data";
/** table stream buffer*/
- protected byte[] _tableStream;
+ private byte[] _tableStream;
/** data stream buffer*/
- protected byte[] _dataStream;
+ private byte[] _dataStream;
/** Document wide Properties*/
- protected DocumentProperties _dop;
+ private DocumentProperties _dop;
/** Contains text of the document wrapped in a obfuscated Word data
* structure*/
- protected ComplexFileTable _cft;
+ private ComplexFileTable _cft;
/** Contains text buffer linked directly to single-piece document text piece */
- protected StringBuilder _text;
+ private StringBuilder _text;
/** Holds the save history for this document. */
- protected SavedByTable _sbt;
+ private SavedByTable _sbt;
/** Holds the revision mark authors for this document. */
- protected RevisionMarkAuthorTable _rmat;
+ private RevisionMarkAuthorTable _rmat;
/** Holds FSBA (shape) information */
private FSPATable _fspaHeaders;
private FSPATable _fspaMain;
/** Escher Drawing Group information */
- protected EscherRecordHolder _escherRecordHolder;
+ private EscherRecordHolder _escherRecordHolder;
/** Holds pictures table */
- protected PicturesTable _pictures;
+ private PicturesTable _pictures;
/** Holds Office Art objects */
- protected OfficeDrawingsImpl _officeDrawingsHeaders;
+ private OfficeDrawingsImpl _officeDrawingsHeaders;
/** Holds Office Art objects */
- protected OfficeDrawingsImpl _officeDrawingsMain;
+ private OfficeDrawingsImpl _officeDrawingsMain;
/** Holds the bookmarks tables */
- protected BookmarksTables _bookmarksTables;
+ private BookmarksTables _bookmarksTables;
/** Holds the bookmarks */
- protected Bookmarks _bookmarks;
+ private Bookmarks _bookmarks;
/** Holds the ending notes tables */
- protected NotesTables _endnotesTables = new NotesTables( NoteType.ENDNOTE );
+ private NotesTables _endnotesTables = new NotesTables( NoteType.ENDNOTE );
/** Holds the footnotes */
- protected Notes _endnotes = new NotesImpl( _endnotesTables );
+ private Notes _endnotes = new NotesImpl( _endnotesTables );
/** Holds the footnotes tables */
- protected NotesTables _footnotesTables = new NotesTables( NoteType.FOOTNOTE );
+ private NotesTables _footnotesTables = new NotesTables( NoteType.FOOTNOTE );
/** Holds the footnotes */
- protected Notes _footnotes = new NotesImpl( _footnotesTables );
+ private Notes _footnotes = new NotesImpl( _footnotesTables );
/** Holds the fields PLCFs */
- protected FieldsTables _fieldsTables;
+ private FieldsTables _fieldsTables;
/** Holds the fields */
- protected Fields _fields;
-
- protected HWPFDocument()
- {
- super();
- this._text = new StringBuilder("\r");
- }
+ private Fields _fields;
/**
* This constructor loads a Word document from an InputStream.
*/
@Override
public void write(File newFile) throws IOException {
- NPOIFSFileSystem pfs = POIFSFileSystem.create(newFile);
+ POIFSFileSystem pfs = POIFSFileSystem.create(newFile);
write(pfs, true);
pfs.writeFilesystem();
}
*/
@Override
public void write(OutputStream out) throws IOException {
- NPOIFSFileSystem pfs = new NPOIFSFileSystem();
+ POIFSFileSystem pfs = new POIFSFileSystem();
write(pfs, true);
pfs.writeFilesystem( out );
}
- private void write(NPOIFSFileSystem pfs, boolean copyOtherEntries) throws IOException {
+ private void write(POIFSFileSystem pfs, boolean copyOtherEntries) throws IOException {
// clear the offsets and sizes in our FileInformationBlock.
_fib.clearOffsetsSizes();
return bos.toByteArray();
}
- private static void write(NPOIFSFileSystem pfs, byte[] data, String name) throws IOException {
+ private static void write(POIFSFileSystem pfs, byte[] data, String name) throws IOException {
pfs.createOrUpdateDocument(new ByteArrayInputStream(data), name);
}
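For context (not part of the patch), a minimal sketch of the write(File) path shown above in everyday use; the input/output paths and the placeholder string are assumptions.

import java.io.File;
import java.io.FileInputStream;
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.hwpf.usermodel.Range;

public class RoundTripDoc {
    public static void main(String[] args) throws Exception {
        try (FileInputStream fis = new FileInputStream("input.doc");
             HWPFDocument doc = new HWPFDocument(fis)) {
            // a trivial edit so the round trip does something observable
            Range range = doc.getRange();
            range.replaceText("$$PLACEHOLDER$$", "replaced text");
            doc.write(new File("output.doc"));
        }
    }
}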
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hslf.usermodel.HSLFSlideShowImpl;
import org.apache.poi.hwpf.HWPFTestDataSamples;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Before;
import org.junit.Test;
@Test
public void testWriteProperties() throws IOException {
// Just check we can write them back out into a filesystem
- NPOIFSFileSystem outFS = new NPOIFSFileSystem();
+ POIFSFileSystem outFS = new POIFSFileSystem();
doc.writeProperties(outFS);
// Should now hold them
ByteArrayOutputStream baos = new ByteArrayOutputStream();
// Write them out
- NPOIFSFileSystem outFS = new NPOIFSFileSystem();
+ POIFSFileSystem outFS = new POIFSFileSystem();
doc.writeProperties(outFS);
outFS.writeFilesystem(baos);
import org.apache.poi.POIDataSamples;
import org.apache.poi.hpbf.HPBFDocument;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Test;
public final class TestPublisherTextExtractor {
// And with NPOIFS
sample = _samples.openResourceAsStream("Sample.pub");
- NPOIFSFileSystem fs = new NPOIFSFileSystem(sample);
+ POIFSFileSystem fs = new POIFSFileSystem(sample);
HPBFDocument docNPOIFS = new HPBFDocument(fs);
ext = new PublisherTextExtractor(docNPOIFS);
assertEquals(SAMPLE_TEXT, ext.getText());
/**
* We have the same file saved for Publisher 98, Publisher 2000 and
* Publisher 2007. Check they all agree.
- *
- * @throws Exception
*/
@Test
public void testMultipleVersions() throws Exception {
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.sl.usermodel.ObjectShape;
/**
- * Tests that we can work with both {@link POIFSFileSystem}
- * and {@link NPOIFSFileSystem}
+ * Tests that we can work with {@link POIFSFileSystem}
*/
@SuppressWarnings("resource")
@Test
public void testDifferentPOIFS() throws IOException {
// Open the two filesystems
File pptFile = slTests.getFile("basic_test_ppt_file.ppt");
- try (final NPOIFSFileSystem npoifs = new NPOIFSFileSystem(pptFile, true)) {
+ try (final POIFSFileSystem npoifs = new POIFSFileSystem(pptFile, true)) {
// Open directly
try (SlideShow<?,?> ppt = SlideShowFactory.create(npoifs.getRoot());
SlideShowExtractor<?,?> extractor = new SlideShowExtractor<>(ppt)) {
import org.apache.poi.poifs.crypt.HashAlgorithm;
import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIDecryptor;
import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIEncryptionHeader;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
-import org.junit.After;
-import org.junit.Before;
import org.junit.Test;
/**
* Tests that DocumentEncryption works properly.
*/
public class TestDocumentEncryption {
- POIDataSamples slTests = POIDataSamples.getSlideShowInstance();
+ private static final POIDataSamples slTests = POIDataSamples.getSlideShowInstance();
@Test
public void cryptoAPIDecryptionOther() throws Exception {
Biff8EncryptionKey.setCurrentUserPassword("hello");
try {
for (String pptFile : encPpts) {
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(slTests.getFile(pptFile), true);
+ try (POIFSFileSystem fs = new POIFSFileSystem(slTests.getFile(pptFile), true);
HSLFSlideShow ppt = new HSLFSlideShow(fs)) {
assertTrue(ppt.getSlides().size() > 0);
} catch (EncryptedPowerPointFileException e) {
public void cryptoAPIChangeKeySize() throws Exception {
String pptFile = "cryptoapi-proc2356.ppt";
Biff8EncryptionKey.setCurrentUserPassword("crypto");
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(slTests.getFile(pptFile), true);
+ try (POIFSFileSystem fs = new POIFSFileSystem(slTests.getFile(pptFile), true);
HSLFSlideShowImpl hss = new HSLFSlideShowImpl(fs)) {
// need to cache data (i.e. read all data) before changing the key size
List<HSLFPictureData> picsExpected = hss.getPictureData();
ByteArrayOutputStream bos = new ByteArrayOutputStream();
hss.write(bos);
- try (NPOIFSFileSystem fs2 = new NPOIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()));
+ try (POIFSFileSystem fs2 = new POIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()));
HSLFSlideShowImpl hss2 = new HSLFSlideShowImpl(fs2)) {
List<HSLFPictureData> picsActual = hss2.getPictureData();
ByteArrayOutputStream expected = new ByteArrayOutputStream();
ByteArrayOutputStream actual = new ByteArrayOutputStream();
try {
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(slTests.getFile(pptFile), true);
+ try (POIFSFileSystem fs = new POIFSFileSystem(slTests.getFile(pptFile), true);
HSLFSlideShowImpl hss = new HSLFSlideShowImpl(fs)) {
hss.normalizeRecords();
// decrypted
ByteArrayInputStream bis = new ByteArrayInputStream(encrypted.toByteArray());
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(bis);
+ try (POIFSFileSystem fs = new POIFSFileSystem(bis);
HSLFSlideShowImpl hss = new HSLFSlideShowImpl(fs)) {
Biff8EncryptionKey.setCurrentUserPassword(null);
hss.write(actual);
// taken from a msdn blog:
// http://blogs.msdn.com/b/openspecification/archive/2009/05/08/dominic-salemno.aspx
Biff8EncryptionKey.setCurrentUserPassword("crypto");
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(slTests.getFile("cryptoapi-proc2356.ppt"));
+ try (POIFSFileSystem fs = new POIFSFileSystem(slTests.getFile("cryptoapi-proc2356.ppt"));
HSLFSlideShow ss = new HSLFSlideShow(fs)) {
HSLFSlide slide = ss.getSlides().get(0);
import org.apache.poi.ddf.EscherProperties;
import org.apache.poi.hslf.HSLFTestDataSamples;
import org.apache.poi.hslf.exceptions.OldPowerPointFormatException;
-import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.hslf.model.HeadersFooters;
import org.apache.poi.hslf.record.DocInfoListContainer;
import org.apache.poi.hslf.record.Document;
import org.apache.poi.hslf.record.VBAInfoAtom;
import org.apache.poi.hslf.record.VBAInfoContainer;
import org.apache.poi.hssf.usermodel.DummyGraphics2d;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.macros.VBAMacroReader;
import org.apache.poi.sl.draw.DrawFactory;
import org.apache.poi.sl.draw.DrawPaint;
assertNotNull(notes);
txrun = notes.getTextParagraphs().get(0);
assertEquals("Notes-1", HSLFTextParagraph.getRawText(txrun));
- assertEquals(false, txrun.get(0).getTextRuns().get(0).isBold());
+ assertFalse(txrun.get(0).getTextRuns().get(0).isBold());
//notes for the second slide are in bold
notes = ppt.getSlides().get(1).getNotes();
assertNotNull(notes);
txrun = notes.getTextParagraphs().get(0);
assertEquals("Notes-2", HSLFTextParagraph.getRawText(txrun));
- assertEquals(true, txrun.get(0).getTextRuns().get(0).isBold());
+ assertTrue(txrun.get(0).getTextRuns().get(0).isBold());
ppt.close();
}
//map slide number and starting phrase of its notes
Map<Integer, String> notesMap = new HashMap<>();
- notesMap.put(Integer.valueOf(4), "For decades before calculators");
- notesMap.put(Integer.valueOf(5), "Several commercial applications");
- notesMap.put(Integer.valueOf(6), "There are three variations of LNS that are discussed here");
- notesMap.put(Integer.valueOf(7), "Although multiply and square root are easier");
- notesMap.put(Integer.valueOf(8), "The bus Z is split into Z_H and Z_L");
+ notesMap.put(4, "For decades before calculators");
+ notesMap.put(5, "Several commercial applications");
+ notesMap.put(6, "There are three variations of LNS that are discussed here");
+ notesMap.put(7, "Although multiply and square root are easier");
+ notesMap.put(8, "The bus Z is split into Z_H and Z_L");
for (HSLFSlide slide : ppt.getSlides()) {
- Integer slideNumber = Integer.valueOf(slide.getSlideNumber());
+ Integer slideNumber = slide.getSlideNumber();
HSLFNotes notes = slide.getNotes();
if (notesMap.containsKey(slideNumber)){
assertNotNull(notes);
/**
* PowerPoint 95 files should throw a more helpful exception
- * @throws IOException
*/
@Test(expected=OldPowerPointFormatException.class)
public void bug41711() throws IOException {
@Test
public void bug45124() throws IOException {
- SlideShow<?,?> ppt = open("bug45124.ppt");
+ HSLFSlideShow ppt = open("bug45124.ppt");
Slide<?,?> slide1 = ppt.getSlides().get(1);
TextBox<?,?> res = slide1.createTextBox();
tp.setBulletStyle(Color.red, 'A');
- SlideShow<?,?> ppt2 = HSLFTestDataSamples.writeOutAndReadBack((HSLFSlideShow)ppt);
+ SlideShow<?,?> ppt2 = HSLFTestDataSamples.writeOutAndReadBack(ppt);
ppt.close();
res = (TextBox<?,?>)ppt2.getSlides().get(1).getShapes().get(1);
// For the test file, common sl draws textruns one by one and not mixed
// so we evaluate the whole iterator
Map<Attribute, Object> attributes = null;
- StringBuffer sb = new StringBuffer();
+ StringBuilder sb = new StringBuilder();
for (char c = iterator.first();
c != CharacterIterator.DONE;
//It isn't pretty, but it works...
private Map<String, String> getMacrosFromHSLF(String fileName) throws IOException {
InputStream is = null;
- NPOIFSFileSystem npoifs = null;
+ POIFSFileSystem npoifs = null;
try {
is = new FileInputStream(POIDataSamples.getSlideShowInstance().getFile(fileName));
- npoifs = new NPOIFSFileSystem(is);
+ npoifs = new POIFSFileSystem(is);
//TODO: should we run the VBAMacroReader on this npoifs?
//TBD: We know that ppt typically don't store macros in the regular place,
//but _can_ they?
long persistId = vbaAtom.getPersistIdRef();
for (HSLFObjectData objData : ppt.getEmbeddedObjects()) {
if (objData.getExOleObjStg().getPersistId() == persistId) {
- VBAMacroReader mr = new VBAMacroReader(objData.getInputStream());
- try {
+ try (VBAMacroReader mr = new VBAMacroReader(objData.getInputStream())) {
return mr.readMacros();
- } finally {
- mr.close();
}
}
}
import org.apache.poi.hslf.HSLFTestDataSamples;
import org.apache.poi.hslf.record.Record;
import org.apache.poi.hslf.record.SlideListWithText;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.junit.After;
import org.junit.Before;
*/
private static void assertMatchesFileC(HSLFSlideShow s) throws IOException {
// Grab the bytes of the file
- NPOIFSFileSystem fs = new NPOIFSFileSystem(HSLFTestDataSamples.openSampleFileStream(filenameC));
+ POIFSFileSystem fs = new POIFSFileSystem(HSLFTestDataSamples.openSampleFileStream(filenameC));
InputStream is = fs.createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT);
byte[] raw_file = IOUtils.toByteArray(is);
is.close();
// Now write out the slideshow
ByteArrayOutputStream baos = new ByteArrayOutputStream();
s.write(baos);
- fs = new NPOIFSFileSystem(new ByteArrayInputStream(baos.toByteArray()));
+ fs = new POIFSFileSystem(new ByteArrayInputStream(baos.toByteArray()));
is = fs.createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT);
byte[] raw_ss = IOUtils.toByteArray(is);
is.close();
slide.addShape(shape);
assertEquals(42.0, tr.getFontSize(), 0);
- assertEquals(true, rt.isBullet());
+ assertTrue(rt.isBullet());
assertEquals(50.0, rt.getLeftMargin(), 0);
assertEquals(0, rt.getIndent(), 0);
assertEquals('\u263A', (char)rt.getBulletChar());
rt = shape.getTextParagraphs().get(0);
tr = rt.getTextRuns().get(0);
assertEquals(42.0, tr.getFontSize(), 0);
- assertEquals(true, rt.isBullet());
+ assertTrue(rt.isBullet());
assertEquals(50.0, rt.getLeftMargin(), 0);
assertEquals(0, rt.getIndent(), 0);
assertEquals('\u263A', (char)rt.getBulletChar());
}
@Test
- public void testChineseParagraphs() throws Exception {
+ public void testChineseParagraphs() {
List<HSLFTextRun> rts;
HSLFTextRun rt;
List<List<HSLFTextParagraph>> txt;
import org.apache.poi.hsmf.datatypes.PropertyValue.TimePropertyValue;
import org.apache.poi.hsmf.dev.HSMFDump;
import org.apache.poi.hsmf.extractor.OutlookTextExtactor;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
private static final String messageFails = "53784_fails.msg";
private static MAPIMessage mapiMessageSucceeds;
private static MAPIMessage mapiMessageFails;
- private static NPOIFSFileSystem fsMessageSucceeds;
- private static NPOIFSFileSystem fsMessageFails;
+ private static POIFSFileSystem fsMessageSucceeds;
+ private static POIFSFileSystem fsMessageFails;
private static SimpleDateFormat messageDateFormat;
private static TimeZone userTimeZone;
@BeforeClass
public static void initMapi() throws Exception {
POIDataSamples samples = POIDataSamples.getHSMFInstance();
- fsMessageSucceeds = new NPOIFSFileSystem(samples.getFile(messageSucceeds));
- fsMessageFails = new NPOIFSFileSystem(samples.getFile(messageFails));
+ fsMessageSucceeds = new POIFSFileSystem(samples.getFile(messageSucceeds));
+ fsMessageFails = new POIFSFileSystem(samples.getFile(messageFails));
mapiMessageSucceeds = new MAPIMessage(fsMessageSucceeds);
mapiMessageFails = new MAPIMessage(fsMessageFails);
* of our test files
*/
@Test
- public void testPropertiesFound() throws Exception {
+ public void testPropertiesFound() {
Map<MAPIProperty,List<PropertyValue>> props;
props = mapiMessageSucceeds.getMainChunks().getProperties();
* Check we find properties of a variety of different types
*/
@Test
- public void testPropertyValueTypes() throws Exception {
+ public void testPropertyValueTypes() {
Chunks mainChunks = mapiMessageSucceeds.getMainChunks();
// Ask to have the values looked up
import org.apache.poi.POIDataSamples;
import org.apache.poi.hsmf.MAPIMessage;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@Test
public void testQuick() throws Exception {
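+ // the boolean constructor argument opens the sample .msg read-only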
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("quick.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
@Test
public void testSimple() throws Exception {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
ext.close();
fis.close();
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
ext = new OutlookTextExtactor(poifs);
String poifsTxt = ext.getText();
ext.close();
"example_sent_regular.msg", "example_sent_unicode.msg"
};
for(String file : files) {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile(file), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile(file), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
"example_received_regular.msg", "example_received_unicode.msg"
};
for(String file : files) {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile(file), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile(file), true);
MAPIMessage msg = new MAPIMessage(poifs);
}
/**
- * See also {@link org.apache.poi.extractor.TestExtractorFactory#testEmbeded()}
+ * See also {@link org.apache.poi.extractor.ooxml.TestExtractorFactory#testEmbeded()}
*/
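+ // the {@link} above points at a class in the ooxml test module and is likely not resolvable here, hence the suppression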
+ @SuppressWarnings("JavadocReference")
+ @Test
public void testWithAttachments() throws Exception {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("attachment_test_msg.msg"), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("attachment_test_msg.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
ext.close();
poifs.close();
}
-
+
+ @Test
public void testWithAttachedMessage() throws Exception {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("58214_with_attachment.msg"), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("58214_with_attachment.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
String text = ext.getText();
ext.close();
poifs.close();
}
-
+
+ @Test
public void testEncodings() throws Exception {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("chinese-traditional.msg"), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("chinese-traditional.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
String text = ext.getText();
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Calendar;
import org.apache.poi.hsmf.datatypes.StringChunk;
import org.apache.poi.hsmf.datatypes.Types;
import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.junit.Test;
@Test
public void testFindsCore() throws IOException, ChunkNotFoundException {
- NPOIFSFileSystem simple = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
+ POIFSFileSystem simple = new POIFSFileSystem(samples.getFile("quick.msg"), true);
// Check a few core things are present
simple.getRoot().getEntry(
@Test
public void testFindsRecips() throws IOException, ChunkNotFoundException {
- NPOIFSFileSystem simple = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
+ POIFSFileSystem simple = new POIFSFileSystem(samples.getFile("quick.msg"), true);
simple.getRoot().getEntry("__recip_version1.0_#00000000");
assertEquals("/O=HOSTEDSERVICE2/OU=FIRST ADMINISTRATIVE GROUP/CN=RECIPIENTS/CN=Kevin.roast@ben",
recips.recipientEmailChunk.getValue());
- String search = new String(recips.recipientSearchChunk.getValue(), "ASCII");
+ String search = new String(recips.recipientSearchChunk.getValue(), StandardCharsets.US_ASCII);
assertEquals("CN=KEVIN.ROAST@BEN\0", search.substring(search.length()-19));
// Now via MAPIMessage
// Now look at another message
- simple = new NPOIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
+ simple = new POIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
msg = new MAPIMessage(simple);
assertNotNull(msg.getRecipientDetailsChunks());
assertEquals(1, msg.getRecipientDetailsChunks().length);
assertEquals("SMTP", msg.getRecipientDetailsChunks()[0].deliveryTypeChunk.getValue());
- assertEquals(null, msg.getRecipientDetailsChunks()[0].recipientSMTPChunk);
- assertEquals(null, msg.getRecipientDetailsChunks()[0].recipientNameChunk);
+ assertNull(msg.getRecipientDetailsChunks()[0].recipientSMTPChunk);
+ assertNull(msg.getRecipientDetailsChunks()[0].recipientNameChunk);
assertEquals("travis@overwrittenstack.com", msg.getRecipientDetailsChunks()[0].recipientEmailChunk.getValue());
assertEquals("travis@overwrittenstack.com", msg.getRecipientEmailAddress());
@Test
public void testFindsMultipleRecipients() throws IOException, ChunkNotFoundException {
- NPOIFSFileSystem multiple = new NPOIFSFileSystem(samples.getFile("example_received_unicode.msg"), true);
+ POIFSFileSystem multiple = new POIFSFileSystem(samples.getFile("example_received_unicode.msg"), true);
multiple.getRoot().getEntry("__recip_version1.0_#00000000");
multiple.getRoot().getEntry("__recip_version1.0_#00000001");
@Test
public void testFindsNameId() throws IOException {
- NPOIFSFileSystem simple = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
+ POIFSFileSystem simple = new POIFSFileSystem(samples.getFile("quick.msg"), true);
simple.getRoot().getEntry("__nameid_version1.0");
@Test
public void testFindsAttachments() throws IOException, ChunkNotFoundException {
- NPOIFSFileSystem with = new NPOIFSFileSystem(samples.getFile("attachment_test_msg.msg"), true);
- NPOIFSFileSystem without = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
+ POIFSFileSystem with = new POIFSFileSystem(samples.getFile("attachment_test_msg.msg"), true);
+ POIFSFileSystem without = new POIFSFileSystem(samples.getFile("quick.msg"), true);
AttachmentChunks attachment;
*/
@Test
public void testOlk10SideProps() throws IOException, ChunkNotFoundException {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("51873.msg"), true);
+ POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("51873.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
// Check core details came through
import org.apache.poi.hwpf.OldWordFileFormatException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.StringUtil;
import org.junit.Test;
/**
- * Tests that we can work with both {@link POIFSFileSystem}
- * and {@link NPOIFSFileSystem}
+ * Tests that we can work with {@link POIFSFileSystem}
+ * opened both from an InputStream and from a File
*/
@Test
public void testDifferentPOIFS() throws Exception {
// Open the two filesystems
File file = docTests.getFile("test2.doc");
- try (NPOIFSFileSystem npoifs = new NPOIFSFileSystem(file, true)) {
+ try (POIFSFileSystem npoifs = new POIFSFileSystem(file, true)) {
DirectoryNode dir = npoifs.getRoot();
import org.apache.poi.POIDataSamples;
import org.apache.poi.extractor.POITextExtractor;
import org.apache.poi.extractor.OLE2ExtractorFactory;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Test;
import java.io.IOException;
@Test
public void testBug60374() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(SAMPLES.openResourceAsStream("cn.orthodox.www_divenbog_APRIL_30-APRIL.DOC"));
+ POIFSFileSystem fs = new POIFSFileSystem(SAMPLES.openResourceAsStream("cn.orthodox.www_divenbog_APRIL_30-APRIL.DOC"));
final POITextExtractor extractor = OLE2ExtractorFactory.createExtractor(fs);
// Check it gives text without error
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.apache.poi.hwpf.extractor.Word6Extractor;
import org.apache.poi.hwpf.extractor.WordExtractor;
import org.apache.poi.hwpf.model.*;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.POILogFactory;
private static final POILogger logger = POILogFactory.getLogger(TestBugs.class);
- public static void assertEqualsIgnoreNewline(String expected, String actual )
- {
+ private static void assertEqualsIgnoreNewline(String expected, String actual) {
String newExpected = expected.replaceAll("\r\n", "\n" )
.replaceAll("\r", "\n").trim();
String newActual = actual.replaceAll("\r\n", "\n" )
doc.close();
}
}
-
- private String getTextOldFile(String samplefile) throws IOException {
- HWPFOldDocument doc = HWPFTestDataSamples.openOldSampleFile(samplefile);
- Word6Extractor extractor = new Word6Extractor(doc);
- try {
- return extractor.getText();
- } finally {
- extractor.close();
- doc.close();
- }
- }
/**
* Bug 33519 - HWPF fails to read a file
try (InputStream is = POIDataSamples.getDocumentInstance()
.openResourceAsStream("Bug47742-text.txt")) {
byte[] expectedBytes = IOUtils.toByteArray(is);
- String expectedText = new String(expectedBytes, "utf-8")
+ String expectedText = new String(expectedBytes, StandardCharsets.UTF_8)
.substring(1); // strip-off the unicode marker
assertEqualsIgnoreNewline(expectedText, foundText);
}
@Test
- public void test49933() throws IOException
- {
- String text = getTextOldFile("Bug49933.doc");
-
- assertContains(text, "best.wine.jump.ru");
+ public void test49933() throws IOException {
+ try (HWPFOldDocument doc = HWPFTestDataSamples.openOldSampleFile("Bug49933.doc");
+ Word6Extractor extractor = new Word6Extractor(doc)) {
+ assertContains(extractor.getText(), "best.wine.jump.ru");
+ }
}
/**
* release from download site )
*/
@Test
- public void test51604p2() throws Exception
- {
+ public void test51604p2() {
HWPFDocument doc = HWPFTestDataSamples.openSampleFile("Bug51604.doc");
Range range = doc.getRange();
{
InputStream is = POIDataSamples.getDocumentInstance()
.openResourceAsStream("empty.doc");
- try (NPOIFSFileSystem npoifsFileSystem = new NPOIFSFileSystem(is)) {
+ try (POIFSFileSystem npoifsFileSystem = new POIFSFileSystem(is)) {
HWPFDocument hwpfDocument = new HWPFDocument(
npoifsFileSystem.getRoot());
hwpfDocument.write(new ByteArrayOutputStream());
* corrupt document
*/
@Test
- public void testBug51834() throws Exception
- {
+ public void testBug51834() {
/*
* we don't have Java test for this file - it should be checked using
* Microsoft BFF Validator. But check read-write-read anyway. -- sergey
* Disabled pending a fix for the bug
*/
@Test
- public void test56880() throws Exception {
+ public void test56880() {
HWPFDocument doc =
HWPFTestDataSamples.openSampleFile("56880.doc");
assertEqualsIgnoreNewline("Check Request", doc.getRange().text());
{
assertNotNull(getText("Bug61268.doc"));
}
-
- // These are the values the are expected to be read when the file
- // is checked.
- private final int section1LeftMargin = 1440;
- private final int section1RightMargin = 1440;
- private final int section1TopMargin = 1440;
- private final int section1BottomMargin = 1440;
- private final int section1NumColumns = 1;
+
private int section2LeftMargin = 1440;
private int section2RightMargin = 1440;
private int section2TopMargin = 1440;
private int section2BottomMargin = 1440;
- private final int section2NumColumns = 3;
-
+
@Test
@SuppressWarnings("SuspiciousNameCombination")
public void testHWPFSections() {
@SuppressWarnings("Duplicates")
private void assertSection1Margin(Section section) {
+ int section1BottomMargin = 1440;
assertEquals(section1BottomMargin, section.getMarginBottom());
+ // These are the values that are expected to be read when the file
+ // is checked.
+ int section1LeftMargin = 1440;
assertEquals(section1LeftMargin, section.getMarginLeft());
+ int section1RightMargin = 1440;
assertEquals(section1RightMargin, section.getMarginRight());
+ int section1TopMargin = 1440;
assertEquals(section1TopMargin, section.getMarginTop());
+ int section1NumColumns = 1;
assertEquals(section1NumColumns, section.getNumColumns());
}
assertEquals(section2LeftMargin, section.getMarginLeft());
assertEquals(section2RightMargin, section.getMarginRight());
assertEquals(section2TopMargin, section.getMarginTop());
+ int section2NumColumns = 3;
assertEquals(section2NumColumns, section.getNumColumns());
}
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.hwpf.HWPFTestCase;
import org.apache.poi.hwpf.HWPFTestDataSamples;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.TempFile;
}
// Open from the temp file in read-write mode
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(file, false);
+ POIFSFileSystem poifs = new POIFSFileSystem(file, false);
HWPFDocument doc = new HWPFDocument(poifs.getRoot());
Range r = doc.getRange();
assertEquals("I am a test document\r", r.getParagraph(0).text());
doc.close();
poifs.close();
- poifs = new NPOIFSFileSystem(file);
+ poifs = new POIFSFileSystem(file);
doc = new HWPFDocument(poifs.getRoot());
r = doc.getRange();
assertEquals("X XX a test document\r", r.getParagraph(0).text());
@Test(expected=IllegalStateException.class)
public void testInvalidInPlaceWriteNPOIFS() throws Exception {
// Can't work for Read-Only files
- NPOIFSFileSystem fs = new NPOIFSFileSystem(SAMPLES.getFile("SampleDoc.doc"), true);
+ POIFSFileSystem fs = new POIFSFileSystem(SAMPLES.getFile("SampleDoc.doc"), true);
HWPFDocument doc = new HWPFDocument(fs.getRoot());
try {
doc.write();
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hssf.HSSFTestDataSamples;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Before;
import org.junit.Test;
@Test
public void writeProperties() throws IOException {
// Just check we can write them back out into a filesystem
- NPOIFSFileSystem outFS = new NPOIFSFileSystem();
+ POIFSFileSystem outFS = new POIFSFileSystem();
doc.readProperties();
doc.writeProperties(outFS);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
// Write them out
- NPOIFSFileSystem outFS = new NPOIFSFileSystem();
+ POIFSFileSystem outFS = new POIFSFileSystem();
doc.readProperties();
doc.writeProperties(outFS);
outFS.writeFilesystem(baos);
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hssf.HSSFTestDataSamples;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Test;
/**
@Test
public void test54233() throws IOException, NoPropertySetStreamException, MarkUnsupportedException {
InputStream is = _samples.openResourceAsStream("TestNon4ByteBoundary.doc");
- NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
+ POIFSFileSystem fs = new POIFSFileSystem(is);
is.close();
SummaryInformation si = (SummaryInformation)
ByteArrayOutputStream baos = new ByteArrayOutputStream();
doc.write(baos);
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
- doc = new HPSFPropertiesOnlyDocument(new NPOIFSFileSystem(bais));
+ doc = new HPSFPropertiesOnlyDocument(new POIFSFileSystem(bais));
// Check properties are still there
assertEquals("Microsoft Word 10.0", si.getApplicationName());
@Test
public void test56138() throws IOException, NoPropertySetStreamException {
InputStream is = _samples.openResourceAsStream("TestZeroLengthCodePage.mpp");
- NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
+ POIFSFileSystem fs = new POIFSFileSystem(is);
is.close();
SummaryInformation si = (SummaryInformation)
si.setLastPrinted(new Date(millis));
try (HSSFWorkbook wb2 = HSSFTestDataSamples.writeOutAndReadBack(wb)) {
SummaryInformation si2 = wb2.getSummaryInformation();
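+ // check the value that survived the write/read round-trip via si2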
- Date d = si.getLastPrinted();
+ Date d = si2.getLastPrinted();
assertNotNull(d);
assertEquals(millis, d.getTime());
}
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.PropertySetFactory;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Test;
import org.junit.runner.RunWith;
public static Iterable<Object[]> files() {
final List<Object[]> files = new ArrayList<>();
- _samples.getFile("").listFiles(new FileFilter() {
- @Override
- public boolean accept(final File f) {
- if (f.getName().startsWith("Test")) { // && f.getName().equals("TestCorel.shw")
- files.add(new Object[]{ f });
- }
- return false;
+ _samples.getFile("").listFiles(f -> {
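+ // the filter never accepts anything; it is only used to collect the matching sample files as a side effect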
+ if (f.getName().startsWith("Test")) {
+ files.add(new Object[]{ f });
}
+ return false;
});
return files;
public void read() throws IOException, NoPropertySetStreamException, MarkUnsupportedException {
/* Read the POI filesystem's property set streams: */
for (POIFile pf : Util.readPropertySets(file)) {
- final InputStream in = new ByteArrayInputStream(pf.getBytes());
- try {
+ try (InputStream in = new ByteArrayInputStream(pf.getBytes())) {
PropertySetFactory.create(in);
- } finally {
- in.close();
}
}
}
}
/**
- * <p>This test method checks whether DocumentSummary information streams
+ * This test method checks whether DocumentSummary information streams
- * can be read. This is done by opening all "Test*" files in the 'poifs' directrory
+ * can be read. This is done by opening all "Test*" files in the 'poifs' directory
* pointed to by the "POI.testdata.path" system property, trying to extract
* the document summary information stream in the root directory and calling
- * its get... methods.</p>
- * @throws Exception
+ * its get... methods.
*/
@Test
public void readDocumentSummaryInformation() throws Exception {
/* Read a test document <em>doc</em> into a POI filesystem. */
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(file, true);
- try {
+ try (POIFSFileSystem poifs = new POIFSFileSystem(file, true)) {
final DirectoryEntry dir = poifs.getRoot();
/*
- * If there is a document summry information stream, read it from
+ * If there is a document summary information stream, read it from
*/
if (dir.hasEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)) {
final DocumentSummaryInformation dsi = TestWriteWellKnown.getDocumentSummaryInformation(poifs);
-
+
/* Execute the get... methods. */
dsi.getByteCount();
dsi.getByteOrder();
dsi.getScale();
dsi.getSlideCount();
}
- } finally {
- poifs.close();
}
}
/**
- * <p>Tests the simplified custom properties by reading them from the
- * available test files.</p>
+ * Tests the simplified custom properties by reading them from the
+ * available test files.
*
- * @throws Throwable if anything goes wrong.
+ * @throws Exception if anything goes wrong.
*/
@Test
public void readCustomPropertiesFromFiles() throws Exception {
/* Read a test document <em>doc</em> into a POI filesystem. */
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(file);
- try {
+ try (POIFSFileSystem poifs = new POIFSFileSystem(file)) {
/*
- * If there is a document summry information stream, read it from
+ * If there is a document summary information stream, read it from
* the POI filesystem, else create a new one.
assertNotNull(cp.getName());
assertNotNull(cp.getValue());
}
- } finally {
- poifs.close();
}
}
import org.apache.poi.hpsf.ClassID;
import org.apache.poi.hpsf.DocumentSummaryInformation;
import org.apache.poi.hpsf.HPSFException;
-import org.apache.poi.hpsf.IllegalPropertySetDataException;
import org.apache.poi.hpsf.NoFormatIDException;
import org.apache.poi.hpsf.NoPropertySetStreamException;
import org.apache.poi.hpsf.Property;
import org.apache.poi.poifs.eventfilesystem.POIFSReaderListener;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DocumentNode;
-import org.apache.poi.poifs.filesystem.NDocumentInputStream;
-import org.apache.poi.poifs.filesystem.NDocumentOutputStream;
-import org.apache.poi.poifs.filesystem.NPOIFSDocument;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.DocumentInputStream;
+import org.apache.poi.poifs.filesystem.DocumentOutputStream;
+import org.apache.poi.poifs.filesystem.POIFSDocument;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.CodePageUtil;
import org.apache.poi.util.IOUtils;
*/
@Test
public void inPlaceNPOIFSWrite() throws Exception {
- NPOIFSFileSystem fs;
+ POIFSFileSystem fs;
DirectoryEntry root;
DocumentNode sinfDoc;
DocumentNode dinfDoc;
// Open the copy in read/write mode
- fs = new NPOIFSFileSystem(copy, false);
+ fs = new POIFSFileSystem(copy, false);
root = fs.getRoot();
sinfDoc = (DocumentNode)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME);
dinfDoc = (DocumentNode)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME);
- InputStream sinfStream = new NDocumentInputStream(sinfDoc);
+ InputStream sinfStream = new DocumentInputStream(sinfDoc);
sinf = (SummaryInformation)PropertySetFactory.create(sinfStream);
sinfStream.close();
assertEquals(131077, sinf.getOSVersion());
- InputStream dinfStream = new NDocumentInputStream(dinfDoc);
+ InputStream dinfStream = new DocumentInputStream(dinfDoc);
dinf = (DocumentSummaryInformation)PropertySetFactory.create(dinfStream);
dinfStream.close();
assertEquals(131077, dinf.getOSVersion());
assertNotNull(sinfDoc);
assertNotNull(dinfDoc);
- new NPOIFSDocument(sinfDoc).replaceContents(sinf.toInputStream());
- new NPOIFSDocument(dinfDoc).replaceContents(dinf.toInputStream());
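+ // overwrite both property streams in place via POIFSDocument.replaceContents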
+ new POIFSDocument(sinfDoc).replaceContents(sinf.toInputStream());
+ new POIFSDocument(dinfDoc).replaceContents(dinf.toInputStream());
// Check it didn't get changed
sinfDoc = (DocumentNode)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME);
dinfDoc = (DocumentNode)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME);
- InputStream sinfStream2 = new NDocumentInputStream(sinfDoc);
+ InputStream sinfStream2 = new DocumentInputStream(sinfDoc);
sinf = (SummaryInformation)PropertySetFactory.create(sinfStream2);
sinfStream2.close();
assertEquals(131077, sinf.getOSVersion());
- InputStream dinfStream2 = new NDocumentInputStream(dinfDoc);
+ InputStream dinfStream2 = new DocumentInputStream(dinfDoc);
dinf = (DocumentSummaryInformation)PropertySetFactory.create(dinfStream2);
dinfStream2.close();
assertEquals(131077, dinf.getOSVersion());
inp.close();
out.close();
- fs = new NPOIFSFileSystem(copy, false);
+ fs = new POIFSFileSystem(copy, false);
root = fs.getRoot();
// Read the properties in once more
sinfDoc = (DocumentNode)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME);
dinfDoc = (DocumentNode)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME);
- InputStream sinfStream3 = new NDocumentInputStream(sinfDoc);
+ InputStream sinfStream3 = new DocumentInputStream(sinfDoc);
sinf = (SummaryInformation)PropertySetFactory.create(sinfStream3);
sinfStream3.close();
assertEquals(131077, sinf.getOSVersion());
- InputStream dinfStream3 = new NDocumentInputStream(dinfDoc);
+ InputStream dinfStream3 = new DocumentInputStream(dinfDoc);
dinf = (DocumentSummaryInformation)PropertySetFactory.create(dinfStream3);
dinfStream3.close();
assertEquals(131077, dinf.getOSVersion());
// Have them write themselves in-place with no changes, as an OutputStream
- OutputStream soufStream = new NDocumentOutputStream(sinfDoc);
+ OutputStream soufStream = new DocumentOutputStream(sinfDoc);
sinf.write(soufStream);
soufStream.close();
- OutputStream doufStream = new NDocumentOutputStream(dinfDoc);
+ OutputStream doufStream = new DocumentOutputStream(dinfDoc);
dinf.write(doufStream);
doufStream.close();
sinfDoc = (DocumentNode)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME);
dinfDoc = (DocumentNode)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME);
- InputStream sinfStream4 = new NDocumentInputStream(sinfDoc);
+ InputStream sinfStream4 = new DocumentInputStream(sinfDoc);
byte[] sinfData = IOUtils.toByteArray(sinfStream4);
sinfStream4.close();
- InputStream dinfStream4 = new NDocumentInputStream(dinfDoc);
+ InputStream dinfStream4 = new DocumentInputStream(dinfDoc);
byte[] dinfData = IOUtils.toByteArray(dinfStream4);
dinfStream4.close();
assertThat(sinfBytes.toByteArray(), equalTo(sinfData));
// Read back in as-is
- InputStream sinfStream5 = new NDocumentInputStream(sinfDoc);
+ InputStream sinfStream5 = new DocumentInputStream(sinfDoc);
sinf = (SummaryInformation)PropertySetFactory.create(sinfStream5);
sinfStream5.close();
assertEquals(131077, sinf.getOSVersion());
- InputStream dinfStream5 = new NDocumentInputStream(dinfDoc);
+ InputStream dinfStream5 = new DocumentInputStream(dinfDoc);
dinf = (DocumentSummaryInformation)PropertySetFactory.create(dinfStream5);
dinfStream5.close();
assertEquals(131077, dinf.getOSVersion());
// Save this into the filesystem
- OutputStream soufStream2 = new NDocumentOutputStream(sinfDoc);
+ OutputStream soufStream2 = new DocumentOutputStream(sinfDoc);
sinf.write(soufStream2);
soufStream2.close();
- OutputStream doufStream2 = new NDocumentOutputStream(dinfDoc);
+ OutputStream doufStream2 = new DocumentOutputStream(dinfDoc);
dinf.write(doufStream2);
doufStream2.close();
// Read them back in again
sinfDoc = (DocumentNode)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME);
- InputStream sinfStream6 = new NDocumentInputStream(sinfDoc);
+ InputStream sinfStream6 = new DocumentInputStream(sinfDoc);
sinf = (SummaryInformation)PropertySetFactory.create(sinfStream6);
sinfStream6.close();
assertEquals(131077, sinf.getOSVersion());
dinfDoc = (DocumentNode)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME);
- InputStream dinfStream6 = new NDocumentInputStream(dinfDoc);
+ InputStream dinfStream6 = new DocumentInputStream(dinfDoc);
dinf = (DocumentSummaryInformation)PropertySetFactory.create(dinfStream6);
dinfStream6.close();
assertEquals(131077, dinf.getOSVersion());
fs.writeFilesystem();
fs.close();
- fs = new NPOIFSFileSystem(copy);
+ fs = new POIFSFileSystem(copy);
root = fs.getRoot();
// Re-check on load
sinfDoc = (DocumentNode)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME);
- InputStream sinfStream7 = new NDocumentInputStream(sinfDoc);
+ InputStream sinfStream7 = new DocumentInputStream(sinfDoc);
sinf = (SummaryInformation)PropertySetFactory.create(sinfStream7);
sinfStream7.close();
assertEquals(131077, sinf.getOSVersion());
dinfDoc = (DocumentNode)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME);
- InputStream dinfStream7 = new NDocumentInputStream(dinfDoc);
+ InputStream dinfStream7 = new DocumentInputStream(dinfDoc);
dinf = (DocumentSummaryInformation)PropertySetFactory.create(dinfStream7);
dinfStream7.close();
assertEquals(131077, dinf.getOSVersion());
import org.apache.poi.hpsf.CustomProperties;
import org.apache.poi.hpsf.CustomProperty;
import org.apache.poi.hpsf.DocumentSummaryInformation;
-import org.apache.poi.hpsf.MarkUnsupportedException;
import org.apache.poi.hpsf.NoPropertySetStreamException;
import org.apache.poi.hpsf.Property;
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.UnexpectedPropertySetTypeException;
import org.apache.poi.hpsf.Variant;
import org.apache.poi.hpsf.VariantSupport;
-import org.apache.poi.hpsf.WritingNotSupportedException;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LocaleUtil;
import org.apache.poi.util.TempFile;
VariantSupport.setLogUnsupportedTypes(false);
}
- static final String P_APPLICATION_NAME = "ApplicationName";
- static final String P_AUTHOR = "Author";
- static final int P_CHAR_COUNT = 4712;
- static final String P_COMMENTS = "Comments";
- static final Date P_CREATE_DATE_TIME;
- static final long P_EDIT_TIME = 4713 * 1000 * 10;
- static final String P_KEYWORDS = "Keywords";
- static final String P_LAST_AUTHOR = "LastAuthor";
- static final Date P_LAST_PRINTED;
- static final Date P_LAST_SAVE_DATE_TIME;
- static final int P_PAGE_COUNT = 4714;
- static final String P_REV_NUMBER = "RevNumber";
- static final int P_SECURITY = 1;
- static final String P_SUBJECT = "Subject";
- static final String P_TEMPLATE = "Template";
+ private static final String P_APPLICATION_NAME = "ApplicationName";
+ private static final String P_AUTHOR = "Author";
+ private static final int P_CHAR_COUNT = 4712;
+ private static final String P_COMMENTS = "Comments";
+ private static final Date P_CREATE_DATE_TIME;
+ private static final long P_EDIT_TIME = 4713 * 1000 * 10;
+ private static final String P_KEYWORDS = "Keywords";
+ private static final String P_LAST_AUTHOR = "LastAuthor";
+ private static final Date P_LAST_PRINTED;
+ private static final Date P_LAST_SAVE_DATE_TIME;
+ private static final int P_PAGE_COUNT = 4714;
+ private static final String P_REV_NUMBER = "RevNumber";
+ private static final int P_SECURITY = 1;
+ private static final String P_SUBJECT = "Subject";
+ private static final String P_TEMPLATE = "Template";
// FIXME (byte array properties not yet implemented): static final byte[] P_THUMBNAIL = new byte[123];
- static final String P_TITLE = "Title";
- static final int P_WORD_COUNT = 4715;
+ private static final String P_TITLE = "Title";
+ private static final int P_WORD_COUNT = 4715;
- static final int P_BYTE_COUNT = 4716;
- static final String P_CATEGORY = "Category";
- static final String P_COMPANY = "Company";
+ private static final int P_BYTE_COUNT = 4716;
+ private static final String P_CATEGORY = "Category";
+ private static final String P_COMPANY = "Company";
// FIXME (byte array properties not yet implemented): static final byte[] P_DOCPARTS = new byte[123];
// FIXME (byte array properties not yet implemented): static final byte[] P_HEADING_PAIR = new byte[123];
- static final int P_HIDDEN_COUNT = 4717;
- static final int P_LINE_COUNT = 4718;
- static final boolean P_LINKS_DIRTY = true;
- static final String P_MANAGER = "Manager";
- static final int P_MM_CLIP_COUNT = 4719;
- static final int P_NOTE_COUNT = 4720;
- static final int P_PAR_COUNT = 4721;
- static final String P_PRESENTATION_FORMAT = "PresentationFormat";
- static final boolean P_SCALE = false;
- static final int P_SLIDE_COUNT = 4722;
- static final Date now = new Date();
-
- static final Integer POSITIVE_INTEGER = new Integer(2222);
- static final Long POSITIVE_LONG = new Long(3333);
- static final Double POSITIVE_DOUBLE = new Double(4444);
- static final Integer NEGATIVE_INTEGER = new Integer(2222);
- static final Long NEGATIVE_LONG = new Long(3333);
- static final Double NEGATIVE_DOUBLE = new Double(4444);
-
- static final Integer MAX_INTEGER = new Integer(Integer.MAX_VALUE);
- static final Integer MIN_INTEGER = new Integer(Integer.MIN_VALUE);
- static final Long MAX_LONG = new Long(Long.MAX_VALUE);
- static final Long MIN_LONG = new Long(Long.MIN_VALUE);
- static final Double MAX_DOUBLE = new Double(Double.MAX_VALUE);
- static final Double MIN_DOUBLE = new Double(Double.MIN_VALUE);
+ private static final int P_HIDDEN_COUNT = 4717;
+ private static final int P_LINE_COUNT = 4718;
+ private static final boolean P_LINKS_DIRTY = true;
+ private static final String P_MANAGER = "Manager";
+ private static final int P_MM_CLIP_COUNT = 4719;
+ private static final int P_NOTE_COUNT = 4720;
+ private static final int P_PAR_COUNT = 4721;
+ private static final String P_PRESENTATION_FORMAT = "PresentationFormat";
+ private static final boolean P_SCALE = false;
+ private static final int P_SLIDE_COUNT = 4722;
+ private static final Date now = new Date();
+
+ private static final Integer POSITIVE_INTEGER = 2222;
+ private static final Long POSITIVE_LONG = 3333L;
+ private static final Double POSITIVE_DOUBLE = 4444d;
+ private static final Integer NEGATIVE_INTEGER = 2222;
+ private static final Long NEGATIVE_LONG = 3333L;
+ private static final Double NEGATIVE_DOUBLE = 4444d;
+
+ private static final Integer MAX_INTEGER = Integer.MAX_VALUE;
+ private static final Integer MIN_INTEGER = Integer.MIN_VALUE;
+ private static final Long MAX_LONG = Long.MAX_VALUE;
+ private static final Long MIN_LONG = Long.MIN_VALUE;
+ private static final Double MAX_DOUBLE = Double.MAX_VALUE;
+ private static final Double MIN_DOUBLE = Double.MIN_VALUE;
static {
Calendar cal = LocaleUtil.getLocaleCalendar(2000, 6, 6, 6, 6, 6);
* be found in the property streams of <em>doc3</em>.</p></li> </ol>
*
* @throws IOException if some I/O error occurred.
- * @throws MarkUnsupportedException
- * @throws NoPropertySetStreamException
- * @throws UnexpectedPropertySetTypeException
- * @throws WritingNotSupportedException
*/
@Test
public void testWriteWellKnown() throws Exception {
CustomProperties cps1 = write1stFile(doc1, doc2);
CustomProperties cps2 = write2ndFile(doc2, doc3);
- write3rdFile(doc3, null);
+ write3rdFile(doc3);
assertEquals(cps1, cps2);
}
*/
private static CustomProperties write1stFile(File fileIn, File fileOut) throws Exception {
/* Read a test document <em>doc1</em> into a POI filesystem. */
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(fileIn, false);
+ POIFSFileSystem poifs = new POIFSFileSystem(fileIn, false);
/*
* Read the summary information stream and the document summary
* values.
*/
private static CustomProperties write2ndFile(File fileIn, File fileOut) throws Exception {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(fileIn, false);
+ POIFSFileSystem poifs = new POIFSFileSystem(fileIn, false);
SummaryInformation si = getSummaryInformation(poifs);
DocumentSummaryInformation dsi = getDocumentSummaryInformation(poifs);
* and document summary information. All properties removed before must not
* be found in the property streams of {@code doc3}.
*/
- private static CustomProperties write3rdFile(File fileIn, File fileOut) throws Exception {
- NPOIFSFileSystem poifs = new NPOIFSFileSystem(fileIn, false);
+ private static void write3rdFile(File fileIn) throws Exception {
+ POIFSFileSystem poifs = new POIFSFileSystem(fileIn, false);
SummaryInformation si = getSummaryInformation(poifs);
DocumentSummaryInformation dsi = getDocumentSummaryInformation(poifs);
assertEquals(0, dsi.getSlideCount());
assertTrue(dsi.wasNull());
poifs.close();
-
- return dsi.getCustomProperties();
}
- private static SummaryInformation getSummaryInformation(NPOIFSFileSystem poifs) throws Exception {
+ private static SummaryInformation getSummaryInformation(POIFSFileSystem poifs) throws Exception {
DocumentInputStream dis = poifs.createDocumentInputStream(SummaryInformation.DEFAULT_STREAM_NAME);
PropertySet ps = new PropertySet(dis);
SummaryInformation si = new SummaryInformation(ps);
return si;
}
- static DocumentSummaryInformation getDocumentSummaryInformation(NPOIFSFileSystem poifs)
- throws IOException, NoPropertySetStreamException, UnexpectedPropertySetTypeException, MarkUnsupportedException {
+ static DocumentSummaryInformation getDocumentSummaryInformation(POIFSFileSystem poifs)
+ throws IOException, NoPropertySetStreamException, UnexpectedPropertySetTypeException {
if (!poifs.getRoot().hasEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)) {
return null;
}
p.setType(Variant.VT_LPWSTR);
p.setValue(VALUE_1);
s.setProperty(p);
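+ // the section dictionary maps numeric property IDs to their display names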
- dictionary.put(Long.valueOf(ID_1), NAME_1);
+ dictionary.put((long) ID_1, NAME_1);
s.setDictionary(dictionary);
cps = dsi.getCustomProperties();
assertEquals(1, cps.size());
/* Add another custom property. */
s.setProperty(ID_2, Variant.VT_LPWSTR, VALUE_1);
- dictionary.put(Long.valueOf(ID_2), NAME_1);
+ dictionary.put((long) ID_2, NAME_1);
s.setDictionary(dictionary);
cps = dsi.getCustomProperties();
assertEquals(1, cps.size());
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
-import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.poi.hssf.HSSFTestDataSamples;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Test;
@Test
public void testNormalProperties() throws Exception {
- POIFSFileSystem fs = new POIFSFileSystem(_samples.openResourceAsStream("TestMickey.doc"));
- HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(fs);
- try {
- // Check each bit in turn
- String summary = ext.getSummaryInformationText();
- String docSummary = ext.getDocumentSummaryInformationText();
-
- assertContains(summary, "TEMPLATE = Normal");
- assertContains(summary, "SUBJECT = sample subject");
- assertContains(docSummary, "MANAGER = sample manager");
- assertContains(docSummary, "COMPANY = sample company");
-
- // Now overall
- String text = ext.getText();
- assertContains(text, "TEMPLATE = Normal");
- assertContains(text, "SUBJECT = sample subject");
- assertContains(text, "MANAGER = sample manager");
- assertContains(text, "COMPANY = sample company");
- } finally {
- ext.close();
+ try (InputStream is = _samples.openResourceAsStream("TestMickey.doc");
+ POIFSFileSystem fs = new POIFSFileSystem(is);
+ HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(fs)) {
+ // Check each bit in turn
+ String summary = ext.getSummaryInformationText();
+ String docSummary = ext.getDocumentSummaryInformationText();
+
+ assertContains(summary, "TEMPLATE = Normal");
+ assertContains(summary, "SUBJECT = sample subject");
+ assertContains(docSummary, "MANAGER = sample manager");
+ assertContains(docSummary, "COMPANY = sample company");
+
+ // Now overall
+ String text = ext.getText();
+ assertContains(text, "TEMPLATE = Normal");
+ assertContains(text, "SUBJECT = sample subject");
+ assertContains(text, "MANAGER = sample manager");
+ assertContains(text, "COMPANY = sample company");
}
}
@Test
public void testNormalUnicodeProperties() throws Exception {
- POIFSFileSystem fs = new POIFSFileSystem(_samples.openResourceAsStream("TestUnicode.xls"));
- HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(fs);
- try {
- // Check each bit in turn
- String summary = ext.getSummaryInformationText();
- String docSummary = ext.getDocumentSummaryInformationText();
-
- assertContains(summary, "AUTHOR = marshall");
- assertContains(summary, "TITLE = Titel: \u00c4h");
- assertContains(docSummary, "COMPANY = Schreiner");
- assertContains(docSummary, "SCALE = false");
-
- // Now overall
- String text = ext.getText();
- assertContains(text, "AUTHOR = marshall");
- assertContains(text, "TITLE = Titel: \u00c4h");
- assertContains(text, "COMPANY = Schreiner");
- assertContains(text, "SCALE = false");
- } finally {
- ext.close();
+
+ try (InputStream is = _samples.openResourceAsStream("TestUnicode.xls");
+ POIFSFileSystem fs = new POIFSFileSystem(is);
+ HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(fs)) {
+ // Check each bit in turn
+ String summary = ext.getSummaryInformationText();
+ String docSummary = ext.getDocumentSummaryInformationText();
+
+ assertContains(summary, "AUTHOR = marshall");
+ assertContains(summary, "TITLE = Titel: \u00c4h");
+ assertContains(docSummary, "COMPANY = Schreiner");
+ assertContains(docSummary, "SCALE = false");
+
+ // Now overall
+ String text = ext.getText();
+ assertContains(text, "AUTHOR = marshall");
+ assertContains(text, "TITLE = Titel: \u00c4h");
+ assertContains(text, "COMPANY = Schreiner");
+ assertContains(text, "SCALE = false");
}
}
@Test
public void testCustomProperties() throws Exception {
- POIFSFileSystem fs = new POIFSFileSystem(
- _samples.openResourceAsStream("TestMickey.doc")
- );
- HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(fs);
- try {
- // Custom properties are part of the document info stream
- String dinfText = ext.getDocumentSummaryInformationText();
- assertContains(dinfText, "Client = sample client");
- assertContains(dinfText, "Division = sample division");
-
- String text = ext.getText();
- assertContains(text, "Client = sample client");
- assertContains(text, "Division = sample division");
- } finally {
- ext.close();
- }
+ try (InputStream is = _samples.openResourceAsStream("TestMickey.doc");
+ POIFSFileSystem fs = new POIFSFileSystem(is);
+ HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(fs)) {
+ // Custom properties are part of the document info stream
+ String dinfText = ext.getDocumentSummaryInformationText();
+ assertContains(dinfText, "Client = sample client");
+ assertContains(dinfText, "Division = sample division");
+
+ String text = ext.getText();
+ assertContains(text, "Client = sample client");
+ assertContains(text, "Division = sample division");
+ }
}
@Test
@Test
public void test42726() throws IOException {
- HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(HSSFTestDataSamples.openSampleWorkbook("42726.xls"));
- try {
- String txt = ext.getText();
- assertContains(txt, "PID_AUTHOR");
- assertContains(txt, "PID_EDITTIME");
- assertContains(txt, "PID_REVNUMBER");
- assertContains(txt, "PID_THUMBNAIL");
- } finally {
- ext.close();
- }
+ try (HSSFWorkbook wb = HSSFTestDataSamples.openSampleWorkbook("42726.xls");
+ HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(wb)) {
+ String txt = ext.getText();
+ assertContains(txt, "PID_AUTHOR");
+ assertContains(txt, "PID_EDITTIME");
+ assertContains(txt, "PID_REVNUMBER");
+ assertContains(txt, "PID_THUMBNAIL");
+ }
}
@Test
@Test
public void test52258() throws Exception {
- POIFSFileSystem fs = new POIFSFileSystem(_samples.openResourceAsStream("TestVisioWithCodepage.vsd"));
- HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(fs);
- try {
- assertNotNull(ext.getDocSummaryInformation());
- assertNotNull(ext.getDocumentSummaryInformationText());
- assertNotNull(ext.getSummaryInformation());
- assertNotNull(ext.getSummaryInformationText());
- assertNotNull(ext.getText());
- } finally {
- ext.close();
- }
+ try (InputStream is = _samples.openResourceAsStream("TestVisioWithCodepage.vsd");
+ POIFSFileSystem fs = new POIFSFileSystem(is);
+ HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(fs)) {
+ assertNotNull(ext.getDocSummaryInformation());
+ assertNotNull(ext.getDocumentSummaryInformationText());
+ assertNotNull(ext.getSummaryInformation());
+ assertNotNull(ext.getSummaryInformationText());
+ assertNotNull(ext.getText());
+ }
}
@Test
- public void test61300Extractor() throws NoPropertySetStreamException, MarkUnsupportedException, IOException {
- try (NPOIFSFileSystem npoifs = new NPOIFSFileSystem(
+ public void test61300Extractor() throws IOException {
+ try (POIFSFileSystem npoifs = new POIFSFileSystem(
POIDataSamples.getPOIFSInstance().getFile("61300.bin"))) {
HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(npoifs);
assertContains(ext.getText(), "PID_CODEPAGE = 1252");
import org.apache.poi.POIDataSamples;
import org.apache.poi.hssf.OldExcelFormatException;
import org.apache.poi.hssf.record.RecordInputStream;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.apache.poi.util.RecordFormatException;
import org.junit.BeforeClass;
EXCLUDED.put("testEXCEL_5.xls", OldExcelFormatException.class); // Biff 5 / Excel 5
EXCLUDED.put("60284.xls", OldExcelFormatException.class); // Biff 5 / Excel 5
EXCLUDED.put("testEXCEL_95.xls", OldExcelFormatException.class); // Biff 5 / Excel 95
- EXCLUDED.put("60284.xls", OldExcelFormatException.class); // Biff 5 / Excel 95
EXCLUDED.put("43493.xls", RecordInputStream.LeftoverDataException.class); // HSSFWorkbook cannot open it as well
// EXCLUDED.put("44958_1.xls", RecordInputStream.LeftoverDataException.class);
EXCLUDED.put("50833.xls", IllegalArgumentException.class); // "Name is too long" when setting username
@Override
void runOneFile(File fileIn) throws IOException {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(fileIn, true);
- try {
- InputStream is = BiffViewer.getPOIFSInputStream(fs);
- try {
- // use a NullOutputStream to not write the bytes anywhere for best runtime
- PrintWriter dummy = new PrintWriter(new OutputStreamWriter(NULL_OUTPUT_STREAM, LocaleUtil.CHARSET_1252));
- BiffViewer.runBiffViewer(dummy, is, true, true, true, false);
- } finally {
- is.close();
- }
- } finally {
- fs.close();
+ try (POIFSFileSystem fs = new POIFSFileSystem(fileIn, true);
+ InputStream is = BiffViewer.getPOIFSInputStream(fs)) {
+ // use a NullOutputStream to not write the bytes anywhere for best runtime
+ PrintWriter dummy = new PrintWriter(new OutputStreamWriter(NULL_OUTPUT_STREAM, LocaleUtil.CHARSET_1252));
+ BiffViewer.runBiffViewer(dummy, is, true, true, true, false);
}
}
import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.POIDataSamples;
import org.apache.poi.hssf.HSSFTestDataSamples;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.util.RecordFormatException;
import org.junit.Ignore;
@Test
public void testNPOIFSFileSystem() throws IOException {
File file = HSSFTestDataSamples.getSampleFile("FormulaRefs.xls");
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(file)) {
+ try (POIFSFileSystem fs = new POIFSFileSystem(file)) {
OldExcelExtractor extractor = new OldExcelExtractor(fs);
extractor.close();
}
@Test
public void testDirectoryNode() throws IOException {
File file = HSSFTestDataSamples.getSampleFile("FormulaRefs.xls");
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(file)) {
+ try (POIFSFileSystem fs = new POIFSFileSystem(file)) {
OldExcelExtractor extractor = new OldExcelExtractor(fs.getRoot());
extractor.close();
}
@Test
public void testDirectoryNodeInvalidFile() throws IOException {
File file = POIDataSamples.getDocumentInstance().getFile("test.doc");
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(file)) {
+ try (POIFSFileSystem fs = new POIFSFileSystem(file)) {
OldExcelExtractor extractor = new OldExcelExtractor(fs.getRoot());
extractor.close();
fail("Should catch exception here");
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.formula.ptg.AttrPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
import org.apache.poi.ss.usermodel.CellType;
public final class TestRVA {
private static final String NEW_LINE = System.getProperty("line.separator");
- private static NPOIFSFileSystem poifs;
+ private static POIFSFileSystem poifs;
private static HSSFWorkbook workbook;
- private static HSSFSheet sheet;
-
- @Parameter(value = 0)
+
+ @Parameter(value = 0)
public HSSFCell formulaCell;
@Parameter(value = 1)
public String formula;
@Parameters(name="{1}")
public static Collection<Object[]> data() throws Exception {
- poifs = new NPOIFSFileSystem(HSSFTestDataSamples.getSampleFile("testRVA.xls"), true);
+ poifs = new POIFSFileSystem(HSSFTestDataSamples.getSampleFile("testRVA.xls"), true);
workbook = new HSSFWorkbook(poifs);
- sheet = workbook.getSheetAt(0);
+ HSSFSheet sheet = workbook.getSheetAt(0);
List<Object[]> data = new ArrayList<>();
}
}
boolean hasMismatch = false;
- StringBuffer sb = new StringBuffer();
+ StringBuilder sb = new StringBuilder();
for (int i = 0; i < nExcelTokens; i++) {
Ptg poiPtg = poiPtgs[i];
Ptg excelPtg = excelPtgs[i];
if (excelPtg.getClass() != poiPtg.getClass()) {
hasMismatch = true;
- sb.append(" mismatch token type[" + i + "] " + getShortClassName(excelPtg) + " "
- + excelPtg.getRVAType() + " - " + getShortClassName(poiPtg) + " "
- + poiPtg.getRVAType());
+ sb.append(" mismatch token type[").append(i).append("] ").append(getShortClassName(excelPtg)).append(" ").append(excelPtg.getRVAType()).append(" - ").append(getShortClassName(poiPtg)).append(" ").append(poiPtg.getRVAType());
sb.append(NEW_LINE);
continue;
}
if (poiPtg.isBaseToken()) {
continue;
}
- sb.append(" token[" + i + "] " + excelPtg + " "
- + excelPtg.getRVAType());
+ sb.append(" token[").append(i).append("] ").append(excelPtg).append(" ").append(excelPtg.getRVAType());
if (excelPtg.getPtgClass() != poiPtg.getPtgClass()) {
hasMismatch = true;
- sb.append(" - was " + poiPtg.getRVAType());
+ sb.append(" - was ").append(poiPtg.getRVAType());
}
sb.append(NEW_LINE);
}
-// if (false) { // set 'true' to see trace of RVA values
-// System.out.println(formulaCell.getRowIndex() + " " + formula);
-// System.out.println(sb.toString());
-// }
assertFalse(hasMismatch);
}
import org.apache.poi.hssf.eventusermodel.HSSFRequest;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Test;
public final class TestChartTitleFormatRecord {
@Test
public void testRecord() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(
+ POIFSFileSystem fs = new POIFSFileSystem(
HSSFTestDataSamples.getSampleFile("WithFormattedGraphTitle.xls"));
// Check we can open the file via usermodel
private static final class ChartTitleFormatRecordGrabber implements HSSFListener {
private final List<ChartTitleFormatRecord> chartTitleFormatRecords;
- public ChartTitleFormatRecordGrabber() {
+ ChartTitleFormatRecordGrabber() {
chartTitleFormatRecords = new ArrayList<>();
}
import org.apache.poi.hssf.record.crypto.Biff8EncryptionKey;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.formula.ptg.Area3DPtg;
import org.apache.poi.ss.formula.ptg.DeletedArea3DPtg;
));
}
try {
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(
+ try (POIFSFileSystem fs = new POIFSFileSystem(
HSSFITestDataProvider.instance.openWorkbookStream("46904.xls"))) {
new HSSFWorkbook(fs.getRoot(), false).close();
fail("Should catch exception here");
HSSFWorkbook wbPOIFS = new HSSFWorkbook(new POIFSFileSystem(
new ByteArrayInputStream(data)).getRoot(), false);
- HSSFWorkbook wbNPOIFS = new HSSFWorkbook(new NPOIFSFileSystem(
+ HSSFWorkbook wbNPOIFS = new HSSFWorkbook(new POIFSFileSystem(
new ByteArrayInputStream(data)).getRoot(), false);
assertEquals(2, wbPOIFS.getNumberOfSheets());
HSSFWorkbook wbPOIFS = new HSSFWorkbook(new POIFSFileSystem(
new ByteArrayInputStream(data)).getRoot(), false);
- HSSFWorkbook wbNPOIFS = new HSSFWorkbook(new NPOIFSFileSystem(
+ HSSFWorkbook wbNPOIFS = new HSSFWorkbook(new POIFSFileSystem(
new ByteArrayInputStream(data)).getRoot(), false);
for (HSSFWorkbook wb : new HSSFWorkbook[]{wbPOIFS, wbNPOIFS}) {
@Test(expected = RuntimeException.class)
public void test61300() throws Exception {
- NPOIFSFileSystem npoifs = new NPOIFSFileSystem(HSSFTestDataSamples.openSampleFileStream("61300.xls"));
+ POIFSFileSystem npoifs = new POIFSFileSystem(HSSFTestDataSamples.openSampleFileStream("61300.xls"));
DocumentEntry entry =
(DocumentEntry) npoifs.getRoot().getEntry(SummaryInformation.DEFAULT_STREAM_NAME);
import org.apache.poi.hssf.record.WindowOneRecord;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DirectoryNode;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.formula.ptg.Area3DPtg;
import org.apache.poi.ss.usermodel.BaseTestWorkbook;
/**
- * Tests that we can work with both {@link POIFSFileSystem}
- * and {@link NPOIFSFileSystem}
+ * Tests that we can work with {@link POIFSFileSystem}
+ * opened both from an InputStream and from a File
*/
@Test
public void differentPOIFS() throws Exception {
DirectoryNode[] files = new DirectoryNode[2];
try (POIFSFileSystem poifsFileSystem = new POIFSFileSystem(HSSFTestDataSamples.openSampleFileStream("Simple.xls"))) {
files[0] = poifsFileSystem.getRoot();
- try (NPOIFSFileSystem npoifsFileSystem = new NPOIFSFileSystem(HSSFTestDataSamples.getSampleFile("Simple.xls"))) {
+ try (POIFSFileSystem npoifsFileSystem = new POIFSFileSystem(HSSFTestDataSamples.getSampleFile("Simple.xls"))) {
files[1] = npoifsFileSystem.getRoot();
// Open without preserving nodes
DirectoryNode[] files = new DirectoryNode[2];
try (POIFSFileSystem poifsFileSystem = new POIFSFileSystem(HSSFTestDataSamples.openSampleFileStream("WithEmbeddedObjects.xls"))) {
files[0] = poifsFileSystem.getRoot();
- try (NPOIFSFileSystem npoifsFileSystem = new NPOIFSFileSystem(HSSFTestDataSamples.getSampleFile("WithEmbeddedObjects.xls"))) {
+ try (POIFSFileSystem npoifsFileSystem = new POIFSFileSystem(HSSFTestDataSamples.getSampleFile("WithEmbeddedObjects.xls"))) {
files[1] = npoifsFileSystem.getRoot();
// Check the embedded parts
@Test
public void writeWorkbookFromNPOIFS() throws IOException {
try (InputStream is = HSSFTestDataSamples.openSampleFileStream("WithEmbeddedObjects.xls");
- NPOIFSFileSystem fs = new NPOIFSFileSystem(is)) {
+ POIFSFileSystem fs = new POIFSFileSystem(is)) {
// Start as NPOIFS
HSSFWorkbook wb = new HSSFWorkbook(fs.getRoot(), true);
assertEquals(3, wb.getNumberOfSheets());
// edit the workbook
{
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(file, false)) {
+ try (POIFSFileSystem fs = new POIFSFileSystem(file, false)) {
DirectoryNode root = fs.getRoot();
final Workbook workbook = new HSSFWorkbook(root, true);
final Sheet sheet = workbook.getSheet("foo");
assertCloseDoesNotModifyFile(filename, wb);
// File via NPOIFileStream (java.nio)
- wb = new HSSFWorkbook(new NPOIFSFileSystem(file));
+ wb = new HSSFWorkbook(new POIFSFileSystem(file));
assertCloseDoesNotModifyFile(filename, wb);
// InputStream
wb.close();
// Can't work for Read-Only files
- NPOIFSFileSystem fs = new NPOIFSFileSystem(
+ POIFSFileSystem fs = new POIFSFileSystem(
POIDataSamples.getSpreadSheetInstance().getFile("SampleSS.xls"), true);
wb = new HSSFWorkbook(fs);
try {
}
// Open from the temp file in read-write mode
- HSSFWorkbook wb = new HSSFWorkbook(new NPOIFSFileSystem(file, false));
+ HSSFWorkbook wb = new HSSFWorkbook(new POIFSFileSystem(file, false));
assertEquals(3, wb.getNumberOfSheets());
// Change
wb.write();
wb.close();
- wb = new HSSFWorkbook(new NPOIFSFileSystem(file));
+ wb = new HSSFWorkbook(new POIFSFileSystem(file));
assertEquals(1, wb.getNumberOfSheets());
assertEquals("Changed!", wb.getSheetAt(0).getRow(0).getCell(0).toString());
wb.close();
// Read and check
- wb = new HSSFWorkbook(new NPOIFSFileSystem(file));
+ wb = new HSSFWorkbook(new POIFSFileSystem(file));
assertEquals(3, wb.getNumberOfSheets());
wb.close();
}
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import org.apache.poi.hssf.HSSFTestDataSamples;
+import org.apache.poi.hssf.record.FilePassRecord;
import org.apache.poi.hssf.record.crypto.Biff8EncryptionKey;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.crypt.EncryptionMode;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.HexRead;
-import org.junit.After;
+import org.junit.Ignore;
import org.junit.Test;
public class TestXorEncryption {
private static final HSSFTestDataSamples samples = new HSSFTestDataSamples();
@Test
- public void testXorEncryption() throws IOException {
+ public void testXorEncryption() {
// Xor-Password: abc
// 2.5.343 XORObfuscation
// key = 20810
public void testUserFile() throws IOException {
File f = samples.getSampleFile("xor-encryption-abc.xls");
Biff8EncryptionKey.setCurrentUserPassword("abc");
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(f, true);
+ try (POIFSFileSystem fs = new POIFSFileSystem(f, true);
HSSFWorkbook hwb = new HSSFWorkbook(fs.getRoot(), true)) {
HSSFSheet sh = hwb.getSheetAt(0);
assertEquals(1.0, sh.getRow(0).getCell(0).getNumericCellValue(), 0.0);
Biff8EncryptionKey.setCurrentUserPassword(null);
}
}
+
+ @Test
+ @Ignore("currently not supported")
+ public void encrypt() throws IOException {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ try {
+ try (HSSFWorkbook hwb = HSSFTestDataSamples.openSampleWorkbook("SampleSS.xls")) {
+ Biff8EncryptionKey.setCurrentUserPassword("abc");
+ hwb.getInternalWorkbook().getWorkbookRecordList()
+ .add(1, new FilePassRecord(EncryptionMode.xor));
+ hwb.write(bos);
+ }
+ try (HSSFWorkbook hwb = new HSSFWorkbook(new ByteArrayInputStream(bos.toByteArray()))) {
+ assertEquals(3, hwb.getNumberOfSheets());
+ }
+ } finally {
+ Biff8EncryptionKey.setCurrentUserPassword(null);
+ }
+ }
}
package org.apache.poi.poifs.dev;
import org.apache.poi.hssf.HSSFTestDataSamples;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.NotOLE2FileException;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
-import org.apache.poi.poifs.property.NPropertyTable;
+import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.util.TempFile;
import org.junit.After;
import org.junit.AfterClass;
SYSTEM = System.out;
System.setOut(new PrintStream(new OutputStream() {
@Override
- public void write(int b) throws IOException {
+ public void write(int b) {
}
}, false, "UTF-8"));
}
}
- public static void cleanDirectory(File directory) throws IOException {
+ private static void cleanDirectory(File directory) throws IOException {
if (!directory.isDirectory()) {
String message = directory + " is not a directory";
throw new IllegalArgumentException(message);
}
}
- public static void forceDelete(File file) throws IOException {
+ private static void forceDelete(File file) throws IOException {
if (file.isDirectory()) {
deleteDirectory(file);
} else {
assertTrue("Had: " + dir, dir.mkdirs());
FileInputStream is = new FileInputStream(TEST_FILE);
- NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
+ POIFSFileSystem fs = new POIFSFileSystem(is);
is.close();
- NPropertyTable props = fs.getPropertyTable();
+ PropertyTable props = fs.getPropertyTable();
assertNotNull(props);
// try with an invalid startBlock to trigger an exception
, TestOfficeXMLException.class
, TestPOIFSDocumentPath.class
, TestPOIFSFileSystem.class
- , TestNPOIFSFileSystem.class
, TestPropertySorter.class
, TestOle10Native.class
})
@Test
public void testNPOIFSDocument() throws IOException {
- try (NPOIFSFileSystem poifs = new NPOIFSFileSystem()) {
+ try (POIFSFileSystem poifs = new POIFSFileSystem()) {
// verify correct number of blocks get created for document
// that is exact multiple of block size
// verify that output is correct
- NPOIFSDocument document = checkDocument(poifs, LARGER_BIG_BLOCK_SIZE + 1);
+ POIFSDocument document = checkDocument(poifs, LARGER_BIG_BLOCK_SIZE + 1);
DocumentProperty property = document.getDocumentProperty();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
}
}
- private static NPOIFSDocument checkDocument(final NPOIFSFileSystem poifs, final int size) throws IOException {
+ private static POIFSDocument checkDocument(final POIFSFileSystem poifs, final int size) throws IOException {
final byte[] input = new byte[size];
IntStream.range(0, size).forEach(i -> input[i] = (byte)i);
- NPOIFSDocument document = ((DocumentNode)poifs.createDocument(
+ POIFSDocument document = ((DocumentNode)poifs.createDocument(
new SlowInputStream(new ByteArrayInputStream(input)),
"entry"+poifs.getRoot().getEntryCount())).getDocument();
final int blockCount = (size + (blockSize-1)) / blockSize;
final byte[] bytCpy = checkValues(blockCount, document, input);
- final NPOIFSDocument copied = makeCopy(document,bytCpy);
+ final POIFSDocument copied = makeCopy(document,bytCpy);
checkValues(blockCount, copied, input);
return document;
}
- private static NPOIFSDocument makeCopy(NPOIFSDocument document, byte[] input) throws IOException {
- NPOIFSFileSystem poifs = document.getFileSystem();
+ private static POIFSDocument makeCopy(POIFSDocument document, byte[] input) throws IOException {
+ POIFSFileSystem poifs = document.getFileSystem();
String name = "test" + input.length;
DirectoryNode root = poifs.getRoot();
if (root.hasEntry(name)) {
.getDocument();
}
- private static byte[] checkValues(final int blockCountExp, NPOIFSDocument document, byte[] input) throws IOException {
+ private static byte[] checkValues(final int blockCountExp, POIFSDocument document, byte[] input) throws IOException {
assertNotNull(document);
assertNotNull(document.getDocumentProperty().getDocument());
assertEquals(document, document.getDocumentProperty().getDocument());
byte[] _workbook_data_only = new byte[_workbook_size];
System.arraycopy(_workbook_data, 0, _workbook_data_only, 0, _workbook_size);
- NPOIFSFileSystem npoifs = new NPOIFSFileSystem();
+ POIFSFileSystem npoifs = new POIFSFileSystem();
// Make it easy when debugging to see what isn't the doc
byte[] minus1 = new byte[512];
Arrays.fill(minus1, (byte) -1);
*/
@Test
public void testConstructor() throws IOException {
- try (DocumentInputStream nstream = new NDocumentInputStream(_workbook_n)) {
+ try (DocumentInputStream nstream = new DocumentInputStream(_workbook_n)) {
assertEquals(_workbook_size, _workbook_n.getSize());
assertEquals(_workbook_size, available(nstream));
}
*/
@Test(expected = IllegalStateException.class)
public void testAvailable() throws IOException {
- DocumentInputStream nstream = new NDocumentInputStream(_workbook_n);
+ DocumentInputStream nstream = new DocumentInputStream(_workbook_n);
assertEquals(_workbook_size, available(nstream));
nstream.close();
byte[] buffer = new byte[_workbook_size / 5];
byte[] small_buffer = new byte[212];
- DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
+ DocumentInputStream stream = new DocumentInputStream(_workbook_n);
// Read a fifth of it, and check all's correct
stream.read(buffer);
for (int j = 0; j < buffer.length; j++) {
}
// Now repeat it with spanning multiple blocks
- stream = new NDocumentInputStream(_workbook_n);
+ stream = new DocumentInputStream(_workbook_n);
// Read several blocks work
buffer = new byte[_workbook_size / 5];
stream.read(buffer);
@SuppressWarnings("ResultOfMethodCallIgnored")
@Test(expected = IOException.class)
public void testReadSingleByte() throws IOException {
- DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
+ DocumentInputStream stream = new DocumentInputStream(_workbook_n);
int remaining = _workbook_size;
// Try and read each byte in turn
@SuppressWarnings("ResultOfMethodCallIgnored")
@Test
public void testBufferRead() throws IOException {
- DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
+ DocumentInputStream stream = new DocumentInputStream(_workbook_n);
// Need to give a byte array to read
try {
stream.read(null);
@SuppressWarnings("ResultOfMethodCallIgnored")
@Test
public void testComplexBufferRead() throws IOException {
- DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
+ DocumentInputStream stream = new DocumentInputStream(_workbook_n);
try {
stream.read(null, 0, 1);
fail("Should have caught NullPointerException");
*/
@Test
public void testSkip() throws IOException {
- DocumentInputStream stream = new NDocumentInputStream(_workbook_n);
+ DocumentInputStream stream = new DocumentInputStream(_workbook_n);
assertEquals(_workbook_size, available(stream));
int count = available(stream);
DocumentInputStream stream;
- try (NPOIFSFileSystem npoifs = new NPOIFSFileSystem(sample)) {
+ try (POIFSFileSystem npoifs = new POIFSFileSystem(sample)) {
// Ensure we have what we expect on the root
assertEquals(npoifs, npoifs.getRoot().getNFileSystem());
assertEquals(npoifs, npoifs.getRoot().getFileSystem());
package org.apache.poi.poifs.filesystem;
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.fail;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import java.util.Arrays;
+import org.apache.poi.util.IOUtils;
import org.junit.Test;
/**
*/
@Test
public void testWrite1() throws IOException {
- ByteArrayOutputStream stream = new ByteArrayOutputStream();
- DocumentOutputStream dstream = new DocumentOutputStream(stream, 25);
+ final byte[] expected = data(25);
- for (int j = 0; j < 25; j++)
- {
- dstream.write(j);
- }
- try
- {
- dstream.write(0);
- fail("Should have caught IOException");
- }
- catch (IOException ignored)
- {
- }
- byte[] output = stream.toByteArray();
+ POIFSWriterListener l = (event) -> {
+ DocumentOutputStream dstream = event.getStream();
- assertEquals(25, output.length);
- for (int j = 0; j < 25; j++)
- {
- assertEquals(( byte ) j, output[ j ]);
- }
- dstream.close();
- stream.close();
+ try {
+ for (byte b : expected) {
+ dstream.write((int)b);
+ }
+ } catch (IOException ignored) {
+ fail("stream exhausted too early");
+ }
+
+ try {
+ dstream.write(0);
+ fail("Should have caught IOException");
+ }
+ catch (IOException ignored) {
+ }
+ };
+
+ compare(l, expected);
}
/**
*/
@Test
public void testWrite2() throws IOException {
- ByteArrayOutputStream stream = new ByteArrayOutputStream();
- DocumentOutputStream dstream = new DocumentOutputStream(stream, 25);
+ final byte[] expected = data(24);
- for (int j = 0; j < 6; j++)
- {
- byte[] array = new byte[ 4 ];
+ POIFSWriterListener l = (event) -> {
+ DocumentOutputStream dstream = event.getStream();
- Arrays.fill(array, ( byte ) j);
- dstream.write(array);
- }
- try
- {
- byte[] array = new byte[ 4 ];
+ try {
+ dstream.write(expected);
+ } catch (IOException ignored) {
+ fail("stream exhausted too early");
+ }
- Arrays.fill(array, ( byte ) 7);
- dstream.write(array);
- fail("Should have caught IOException");
- }
- catch (IOException ignored)
- {
- }
- byte[] output = stream.toByteArray();
-
- assertEquals(24, output.length);
- for (int j = 0; j < 6; j++)
- {
- for (int k = 0; k < 4; k++)
- {
- assertEquals(String.valueOf((j * 4) + k), ( byte ) j,
- output[ (j * 4) + k ]);
+ try {
+ dstream.write(new byte[]{'7','7','7','7'});
+ fail("Should have caught IOException");
+ } catch (IOException ignored) {
}
- }
- dstream.close();
- stream.close();
+ };
+
+ compare(l, expected);
}
/**
*/
@Test
public void testWrite3() throws IOException {
- ByteArrayOutputStream stream = new ByteArrayOutputStream();
- DocumentOutputStream dstream = new DocumentOutputStream(stream, 25);
- byte[] array = new byte[ 50 ];
+ byte[] input = data(50);
+ byte[] expected = new byte[25];
+ System.arraycopy(input, 1, expected, 0, 25);
+
+ POIFSWriterListener l = (event) -> {
+ DocumentOutputStream dstream = event.getStream();
+ try {
+ dstream.write(input, 1, 25);
+ } catch (IOException ignored) {
+ fail("stream exhausted too early");
+ }
+ try {
+ dstream.write(input, 0, 1);
+ fail("Should have caught IOException");
+ }
+ catch (IOException ignored) {}
+ };
- for (int j = 0; j < 50; j++)
- {
- array[ j ] = ( byte ) j;
- }
- dstream.write(array, 1, 25);
- try
- {
- dstream.write(array, 0, 1);
- fail("Should have caught IOException");
- }
- catch (IOException ignored)
- {
- }
- byte[] output = stream.toByteArray();
+ compare(l, expected);
+ }
- assertEquals(25, output.length);
- for (int j = 0; j < 25; j++)
- {
- assertEquals(( byte ) (j + 1), output[ j ]);
+ private static byte[] data(int len) {
+ byte[] input = new byte[len];
+ for (int i = 0; i < len; i++) {
+ input[i] = (byte)('0' + (i%10));
}
- dstream.close();
- stream.close();
+ return input;
}
- /**
- * test writeFiller()
- */
- @Test
- public void testWriteFiller() throws IOException {
- ByteArrayOutputStream stream = new ByteArrayOutputStream();
- DocumentOutputStream dstream = new DocumentOutputStream(stream, 25);
+ private void compare(POIFSWriterListener l, byte[] expected) throws IOException {
+ try (POIFSFileSystem poifs = new POIFSFileSystem()) {
+ DirectoryNode root = poifs.getRoot();
+ root.createDocument("foo", expected.length, l);
- for (int j = 0; j < 25; j++)
- {
- dstream.write(j);
- }
- try
- {
- dstream.write(0);
- fail("Should have caught IOException");
- }
- catch (IOException ignored)
- {
- }
- dstream.writeFiller(100, ( byte ) 0xff);
- byte[] output = stream.toByteArray();
-
- assertEquals(100, output.length);
- for (int j = 0; j < 25; j++)
- {
- assertEquals(( byte ) j, output[ j ]);
- }
- for (int j = 25; j < 100; j++)
- {
- assertEquals(String.valueOf(j), ( byte ) 0xff, output[ j ]);
+ try (DocumentInputStream is = root.createDocumentInputStream("foo")) {
+ final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length);
+ IOUtils.copy(is, bos);
+ assertArrayEquals(expected, bos.toByteArray());
+ }
}
- dstream.close();
- stream.close();
}
}
package org.apache.poi.poifs.filesystem;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
// Names must match
- assertEquals(false, entryA1.getName().equals(entryA1b.getName()));
- assertEquals(false, EntryUtils.areDocumentsIdentical(entryA1, entryA1b));
+ assertNotEquals(entryA1.getName(), entryA1b.getName());
+ assertFalse(EntryUtils.areDocumentsIdentical(entryA1, entryA1b));
// Contents must match
- assertEquals(false, EntryUtils.areDocumentsIdentical(entryA1, entryA2));
+ assertFalse(EntryUtils.areDocumentsIdentical(entryA1, entryA2));
// Parents don't matter if contents + names are the same
- assertEquals(false, entryA1.getParent().equals(entryB1.getParent()));
- assertEquals(true, EntryUtils.areDocumentsIdentical(entryA1, entryB1));
+ assertNotEquals(entryA1.getParent(), entryB1.getParent());
+ assertTrue(EntryUtils.areDocumentsIdentical(entryA1, entryB1));
// Can work with NPOIFS + POIFS
fs.writeFilesystem(tmpO);
ByteArrayInputStream tmpI = new ByteArrayInputStream(tmpO.toByteArray());
- NPOIFSFileSystem nfs = new NPOIFSFileSystem(tmpI);
+ POIFSFileSystem nfs = new POIFSFileSystem(tmpI);
DirectoryEntry dN1 = (DirectoryEntry)nfs.getRoot().getEntry("DirA");
DirectoryEntry dN2 = (DirectoryEntry)nfs.getRoot().getEntry("DirB");
DocumentEntry eNA1 = (DocumentEntry)dN1.getEntry(entryA1.getName());
DocumentEntry eNA2 = (DocumentEntry)dN1.getEntry(entryA2.getName());
DocumentEntry eNB1 = (DocumentEntry)dN2.getEntry(entryB1.getName());
-
- assertEquals(false, EntryUtils.areDocumentsIdentical(eNA1, eNA2));
- assertEquals(true, EntryUtils.areDocumentsIdentical(eNA1, eNB1));
-
- assertEquals(false, EntryUtils.areDocumentsIdentical(eNA1, entryA1b));
- assertEquals(false, EntryUtils.areDocumentsIdentical(eNA1, entryA2));
-
- assertEquals(true, EntryUtils.areDocumentsIdentical(eNA1, entryA1));
- assertEquals(true, EntryUtils.areDocumentsIdentical(eNA1, entryB1));
+
+ assertFalse(EntryUtils.areDocumentsIdentical(eNA1, eNA2));
+ assertTrue(EntryUtils.areDocumentsIdentical(eNA1, eNB1));
+
+ assertFalse(EntryUtils.areDocumentsIdentical(eNA1, entryA1b));
+ assertFalse(EntryUtils.areDocumentsIdentical(eNA1, entryA2));
+
+ assertTrue(EntryUtils.areDocumentsIdentical(eNA1, entryA1));
+ assertTrue(EntryUtils.areDocumentsIdentical(eNA1, entryB1));
nfs.close();
fs.close();
}
DirectoryEntry dirB = fs.createDirectory("DirB");
// Names must match
- assertEquals(false, EntryUtils.areDirectoriesIdentical(dirA, dirB));
+ assertFalse(EntryUtils.areDirectoriesIdentical(dirA, dirB));
// Empty dirs are fine
DirectoryEntry dirA1 = dirA.createDirectory("TheDir");
DirectoryEntry dirB1 = dirB.createDirectory("TheDir");
assertEquals(0, dirA1.getEntryCount());
assertEquals(0, dirB1.getEntryCount());
- assertEquals(true, EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
+ assertTrue(EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
// Otherwise children must match
dirA1.createDocument("Entry1", new ByteArrayInputStream(dataSmallA));
- assertEquals(false, EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
+ assertFalse(EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
dirB1.createDocument("Entry1", new ByteArrayInputStream(dataSmallA));
- assertEquals(true, EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
+ assertTrue(EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
dirA1.createDirectory("DD");
- assertEquals(false, EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
+ assertFalse(EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
dirB1.createDirectory("DD");
- assertEquals(true, EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
+ assertTrue(EntryUtils.areDirectoriesIdentical(dirA1, dirB1));
// Excludes support
- List<String> excl = Arrays.asList(new String[] {"Ignore1", "IgnDir/Ign2"});
+ List<String> excl = Arrays.asList("Ignore1", "IgnDir/Ign2");
FilteringDirectoryNode fdA = new FilteringDirectoryNode(dirA1, excl);
FilteringDirectoryNode fdB = new FilteringDirectoryNode(dirB1, excl);
-
- assertEquals(true, EntryUtils.areDirectoriesIdentical(fdA, fdB));
+
+ assertTrue(EntryUtils.areDirectoriesIdentical(fdA, fdB));
// Add an ignored doc, no notice is taken
fdA.createDocument("Ignore1", new ByteArrayInputStream(dataSmallA));
- assertEquals(true, EntryUtils.areDirectoriesIdentical(fdA, fdB));
+ assertTrue(EntryUtils.areDirectoriesIdentical(fdA, fdB));
// Add a directory with filtered contents, not the same
DirectoryEntry dirAI = dirA1.createDirectory("IgnDir");
- assertEquals(false, EntryUtils.areDirectoriesIdentical(fdA, fdB));
+ assertFalse(EntryUtils.areDirectoriesIdentical(fdA, fdB));
DirectoryEntry dirBI = dirB1.createDirectory("IgnDir");
- assertEquals(true, EntryUtils.areDirectoriesIdentical(fdA, fdB));
+ assertTrue(EntryUtils.areDirectoriesIdentical(fdA, fdB));
// Add something to the filtered subdir that gets ignored
dirAI.createDocument("Ign2", new ByteArrayInputStream(dataSmallA));
- assertEquals(true, EntryUtils.areDirectoriesIdentical(fdA, fdB));
+ assertTrue(EntryUtils.areDirectoriesIdentical(fdA, fdB));
// And something that doesn't
dirAI.createDocument("IgnZZ", new ByteArrayInputStream(dataSmallA));
- assertEquals(false, EntryUtils.areDirectoriesIdentical(fdA, fdB));
+ assertFalse(EntryUtils.areDirectoriesIdentical(fdA, fdB));
dirBI.createDocument("IgnZZ", new ByteArrayInputStream(dataSmallA));
- assertEquals(true, EntryUtils.areDirectoriesIdentical(fdA, fdB));
+ assertTrue(EntryUtils.areDirectoriesIdentical(fdA, fdB));
fs.close();
}
private static POIDataSamples _samples = POIDataSamples.getPOIFSInstance();
private static POIDataSamples _ssSamples = POIDataSamples.getSpreadSheetInstance();
- private List<NPOIFSFileSystem> openedFSs;
+ private List<POIFSFileSystem> openedFSs;
@After
public void tearDown() {
if (openedFSs != null && !openedFSs.isEmpty()) {
- for (NPOIFSFileSystem fs : openedFSs) {
+ for (POIFSFileSystem fs : openedFSs) {
try {
fs.close();
} catch (Exception e) {
}
private DirectoryNode openSample(InputStream inps) throws Exception {
- NPOIFSFileSystem nfs = new NPOIFSFileSystem(inps);
+ POIFSFileSystem nfs = new POIFSFileSystem(inps);
if (openedFSs == null) {
openedFSs = new ArrayList<>();
}
fetchSizes("/", root, entries);
// Prepare to copy
- DirectoryNode dest = new NPOIFSFileSystem().getRoot();
+ DirectoryNode dest = new POIFSFileSystem().getRoot();
// Copy over
EntryUtils.copyNodes(root, dest);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
root.getNFileSystem().writeFilesystem(baos);
- NPOIFSFileSystem read = new NPOIFSFileSystem(
+ POIFSFileSystem read = new POIFSFileSystem(
new ByteArrayInputStream(baos.toByteArray()));
// Check the structure matches
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.filesystem;
-
-import static org.hamcrest.core.IsCollectionContaining.hasItem;
-import static org.hamcrest.core.IsEqual.equalTo;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-
-import org.apache.poi.POIDataSamples;
-import org.apache.poi.hpsf.DocumentSummaryInformation;
-import org.apache.poi.hpsf.PropertySet;
-import org.apache.poi.hpsf.PropertySetFactory;
-import org.apache.poi.hpsf.SummaryInformation;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.property.DirectoryProperty;
-import org.apache.poi.poifs.property.NPropertyTable;
-import org.apache.poi.poifs.property.Property;
-import org.apache.poi.poifs.property.RootProperty;
-import org.apache.poi.poifs.storage.HeaderBlock;
-import org.apache.poi.util.IOUtils;
-import org.apache.poi.util.TempFile;
-import org.junit.Assume;
-import org.junit.Ignore;
-import org.junit.Test;
-
-/**
- * Tests for the new NIO POIFSFileSystem implementation
- */
-public final class TestNPOIFSFileSystem {
- private static final POIDataSamples _inst = POIDataSamples.getPOIFSInstance();
-
- /**
- * Returns test files with 512 byte and 4k block sizes, loaded
- * both from InputStreams and Files
- */
- private NPOIFSFileSystem[] get512and4kFileAndInput() throws IOException {
- NPOIFSFileSystem fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
- NPOIFSFileSystem fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- NPOIFSFileSystem fsC = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
- NPOIFSFileSystem fsD = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
- return new NPOIFSFileSystem[] {fsA,fsB,fsC,fsD};
- }
-
- private static void assertBATCount(NPOIFSFileSystem fs, int expectedBAT, int expectedXBAT) throws IOException {
- int foundBAT = 0;
- int foundXBAT = 0;
- int sz = (int)(fs.size() / fs.getBigBlockSize());
- for (int i=0; i<sz; i++) {
- if(fs.getNextBlock(i) == POIFSConstants.FAT_SECTOR_BLOCK) {
- foundBAT++;
- }
- if(fs.getNextBlock(i) == POIFSConstants.DIFAT_SECTOR_BLOCK) {
- foundXBAT++;
- }
- }
- assertEquals("Wrong number of BATs", expectedBAT, foundBAT);
- assertEquals("Wrong number of XBATs with " + expectedBAT + " BATs", expectedXBAT, foundXBAT);
- }
- private void assertContentsMatches(byte[] expected, DocumentEntry doc) throws IOException {
- NDocumentInputStream inp = new NDocumentInputStream(doc);
- byte[] contents = new byte[doc.getSize()];
- assertEquals(doc.getSize(), inp.read(contents));
- inp.close();
-
- if (expected != null) {
- assertThat(expected, equalTo(contents));
- }
- }
-
- private static HeaderBlock writeOutAndReadHeader(NPOIFSFileSystem fs) throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- fs.writeFilesystem(baos);
-
- return new HeaderBlock(new ByteArrayInputStream(baos.toByteArray()));
- }
-
- static NPOIFSFileSystem writeOutAndReadBack(NPOIFSFileSystem original) throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- original.writeFilesystem(baos);
- return new NPOIFSFileSystem(new ByteArrayInputStream(baos.toByteArray()));
- }
-
- private static NPOIFSFileSystem writeOutFileAndReadBack(NPOIFSFileSystem original) throws IOException {
- final File file = TempFile.createTempFile("TestPOIFS", ".ole2");
- try (OutputStream fout = new FileOutputStream(file)) {
- original.writeFilesystem(fout);
- }
- return new NPOIFSFileSystem(file, false);
- }
-
- @Test
- public void basicOpen() throws IOException {
- NPOIFSFileSystem fsA, fsB;
-
- // With a simple 512 block file
- fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
- fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB}) {
- assertEquals(512, fs.getBigBlockSize());
- }
- fsA.close();
- fsB.close();
-
- // Now with a simple 4096 block file
- fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
- fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB}) {
- assertEquals(4096, fs.getBigBlockSize());
- }
- fsA.close();
- fsB.close();
- }
-
- @Test
- public void propertiesAndFatOnRead() throws IOException {
- NPOIFSFileSystem fsA, fsB;
-
- // With a simple 512 block file
- fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
- fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB}) {
- // Check the FAT was properly processed:
- // Verify we only got one block
- fs.getBATBlockAndIndex(0);
- fs.getBATBlockAndIndex(1);
- try {
- fs.getBATBlockAndIndex(140);
- fail("Should only be one BAT, but a 2nd was found");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
-
- // Verify a few next offsets
- // 97 -> 98 -> END
- assertEquals(98, fs.getNextBlock(97));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(98));
-
-
- // Check the properties
- NPropertyTable props = fs._get_property_table();
- assertEquals(90, props.getStartBlock());
- assertEquals(7, props.countBlocks());
-
- // Root property tells us about the Mini Stream
- RootProperty root = props.getRoot();
- assertEquals("Root Entry", root.getName());
- assertEquals(11564, root.getSize());
- assertEquals(0, root.getStartBlock());
-
- // Check its children too
- Property prop;
- Iterator<Property> pi = root.getChildren();
- prop = pi.next();
- assertEquals("Thumbnail", prop.getName());
- prop = pi.next();
- assertEquals("\u0005DocumentSummaryInformation", prop.getName());
- prop = pi.next();
- assertEquals("\u0005SummaryInformation", prop.getName());
- prop = pi.next();
- assertEquals("Image", prop.getName());
- prop = pi.next();
- assertEquals("Tags", prop.getName());
- assertFalse(pi.hasNext());
-
-
- // Check the SBAT (Small Blocks FAT) was properly processed
- NPOIFSMiniStore ministore = fs.getMiniStore();
-
- // Verify we only got two SBAT blocks
- ministore.getBATBlockAndIndex(0);
- ministore.getBATBlockAndIndex(128);
- try {
- ministore.getBATBlockAndIndex(256);
- fail("Should only be two SBATs, but a 3rd was found");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
-
- // Verify a few offsets: 0->50 is a stream
- for(int i=0; i<50; i++) {
- assertEquals(i+1, ministore.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(50));
-
- fs.close();
- }
-
- // Now with a simple 4096 block file
- fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
- fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB}) {
- // Check the FAT was properly processed
- // Verify we only got one block
- fs.getBATBlockAndIndex(0);
- fs.getBATBlockAndIndex(1);
- try {
- fs.getBATBlockAndIndex(1040);
- fail("Should only be one BAT, but a 2nd was found");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
-
- // Verify a few next offsets
- // 0 -> 1 -> 2 -> END
- assertEquals(1, fs.getNextBlock(0));
- assertEquals(2, fs.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
-
-
- // Check the properties
- NPropertyTable props = fs._get_property_table();
- assertEquals(12, props.getStartBlock());
- assertEquals(1, props.countBlocks());
-
- // Root property tells us about the Mini Stream
- RootProperty root = props.getRoot();
- assertEquals("Root Entry", root.getName());
- assertEquals(11564, root.getSize());
- assertEquals(0, root.getStartBlock());
-
- // Check its children too
- Property prop;
- Iterator<Property> pi = root.getChildren();
- prop = pi.next();
- assertEquals("Thumbnail", prop.getName());
- prop = pi.next();
- assertEquals("\u0005DocumentSummaryInformation", prop.getName());
- prop = pi.next();
- assertEquals("\u0005SummaryInformation", prop.getName());
- prop = pi.next();
- assertEquals("Image", prop.getName());
- prop = pi.next();
- assertEquals("Tags", prop.getName());
- assertFalse(pi.hasNext());
-
-
- // Check the SBAT (Small Blocks FAT) was properly processed
- NPOIFSMiniStore ministore = fs.getMiniStore();
-
- // Verify we only got one SBAT block
- ministore.getBATBlockAndIndex(0);
- ministore.getBATBlockAndIndex(128);
- ministore.getBATBlockAndIndex(1023);
- try {
- ministore.getBATBlockAndIndex(1024);
- fail("Should only be one SBAT, but a 2nd was found");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
-
- // Verify a few offsets: 0->50 is a stream
- for(int i=0; i<50; i++) {
- assertEquals(i+1, ministore.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(50));
-
- fs.close();
- }
- }
-
- /**
- * Check that for a given block, we can correctly figure
- * out what the next one is
- */
- @Test
- public void nextBlock() throws IOException {
- NPOIFSFileSystem fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
- NPOIFSFileSystem fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB}) {
- // 0 -> 21 are simple
- for(int i=0; i<21; i++) {
- assertEquals(i+1, fs.getNextBlock(i));
- }
- // 21 jumps to 89, then ends
- assertEquals(89, fs.getNextBlock(21));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(89));
-
- // 22 -> 88 simple sequential stream
- for(int i=22; i<88; i++) {
- assertEquals(i+1, fs.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(88));
-
- // 90 -> 96 is another stream
- for(int i=90; i<96; i++) {
- assertEquals(i+1, fs.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(96));
-
- // 97+98 is another
- assertEquals(98, fs.getNextBlock(97));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(98));
-
- // 99 is our FAT block
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
-
- // 100 onwards is free
- for(int i=100; i<fs.getBigBlockSizeDetails().getBATEntriesPerBlock(); i++) {
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(i));
- }
-
- fs.close();
- }
-
- // Quick check on 4096 byte blocks too
- fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
- fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB}) {
- // 0 -> 1 -> 2 -> end
- assertEquals(1, fs.getNextBlock(0));
- assertEquals(2, fs.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
-
- // 4 -> 11 then end
- for(int i=4; i<11; i++) {
- assertEquals(i+1, fs.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(11));
-
- fs.close();
- }
- }
-
- /**
- * Check we get the right data back for each block
- */
- @Test
- public void getBlock() throws IOException {
- NPOIFSFileSystem fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
- NPOIFSFileSystem fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB}) {
- ByteBuffer b;
-
- // The 0th block is the first data block
- b = fs.getBlockAt(0);
- assertEquals((byte)0x9e, b.get());
- assertEquals((byte)0x75, b.get());
- assertEquals((byte)0x97, b.get());
- assertEquals((byte)0xf6, b.get());
-
- // And the next block
- b = fs.getBlockAt(1);
- assertEquals((byte)0x86, b.get());
- assertEquals((byte)0x09, b.get());
- assertEquals((byte)0x22, b.get());
- assertEquals((byte)0xfb, b.get());
-
- // Check the final block too
- b = fs.getBlockAt(99);
- assertEquals((byte)0x01, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x02, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
-
- fs.close();
- }
-
- // Quick check on 4096 byte blocks too
- fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
- fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB}) {
- ByteBuffer b;
-
- // The 0th block is the first data block
- b = fs.getBlockAt(0);
- assertEquals((byte)0x9e, b.get());
- assertEquals((byte)0x75, b.get());
- assertEquals((byte)0x97, b.get());
- assertEquals((byte)0xf6, b.get());
-
- // And the next block
- b = fs.getBlockAt(1);
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x03, b.get());
- assertEquals((byte)0x00, b.get());
-
- // The 14th block is the FAT
- b = fs.getBlockAt(14);
- assertEquals((byte)0x01, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x02, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
-
- fs.close();
- }
- }
-
- /**
- * Ask for free blocks where there are some already
- * to be had from the FAT
- */
- @Test
- public void getFreeBlockWithSpare() throws IOException {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
-
- // Our first BAT block has spares
- assertTrue(fs.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
-
- // First free one is 100
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(101));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(102));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(103));
-
- // Ask, will get 100
- assertEquals(100, fs.getFreeBlock());
-
- // Ask again, will still get 100 as not written to
- assertEquals(100, fs.getFreeBlock());
-
- // Allocate it, then ask again
- fs.setNextBlock(100, POIFSConstants.END_OF_CHAIN);
- assertEquals(101, fs.getFreeBlock());
-
- // All done
- fs.close();
- }
-
- /**
- * Ask for free blocks where no free ones exist, and so the
- * file needs to be extended and another BAT/XBAT added
- */
- @Test
- public void getFreeBlockWithNoneSpare() throws IOException {
- NPOIFSFileSystem fs1 = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- int free;
-
- // We have one BAT at block 99
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(99));
- assertBATCount(fs1, 1, 0);
-
- // We've spare ones from 100 to 128
- for(int i=100; i<128; i++) {
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(i));
- }
-
- // Check our BAT knows it's free
- assertTrue(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
-
- // Allocate all the spare ones
- for(int i=100; i<128; i++) {
- fs1.setNextBlock(i, POIFSConstants.END_OF_CHAIN);
- }
-
- // BAT is now full, but there's only the one
- assertFalse(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
- try {
- assertFalse(fs1.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
- fail("Should only be one BAT");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
- assertBATCount(fs1, 1, 0);
-
-
- // Now ask for a free one, will need to extend the file
- assertEquals(129, fs1.getFreeBlock());
-
- assertFalse(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
- assertTrue(fs1.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(128));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(129));
-
- // We now have 2 BATs, but no XBATs
- assertBATCount(fs1, 2, 0);
-
-
- // Fill up to hold 109 BAT blocks
- for(int i=0; i<109; i++) {
- fs1.getFreeBlock();
- int startOffset = i*128;
- while( fs1.getBATBlockAndIndex(startOffset).getBlock().hasFreeSectors() ) {
- free = fs1.getFreeBlock();
- fs1.setNextBlock(free, POIFSConstants.END_OF_CHAIN);
- }
- }
-
- assertFalse(fs1.getBATBlockAndIndex(109 * 128 - 1).getBlock().hasFreeSectors());
- try {
- assertFalse(fs1.getBATBlockAndIndex(109 * 128).getBlock().hasFreeSectors());
- fail("Should only be 109 BATs");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
-
- // We now have 109 BATs, but no XBATs
- assertBATCount(fs1, 109, 0);
-
-
- // Ask for it to be written out, and check the header
- HeaderBlock header = writeOutAndReadHeader(fs1);
- assertEquals(109, header.getBATCount());
- assertEquals(0, header.getXBATCount());
-
-
- // Ask for another, will get our first XBAT
- free = fs1.getFreeBlock();
- assertTrue("Had: " + free, free > 0);
-
- assertFalse(fs1.getBATBlockAndIndex(109 * 128 - 1).getBlock().hasFreeSectors());
- assertTrue(fs1.getBATBlockAndIndex(110 * 128 - 1).getBlock().hasFreeSectors());
- try {
- assertFalse(fs1.getBATBlockAndIndex(110 * 128).getBlock().hasFreeSectors());
- fail("Should only be 110 BATs");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
- assertBATCount(fs1, 110, 1);
-
- header = writeOutAndReadHeader(fs1);
- assertEquals(110, header.getBATCount());
- assertEquals(1, header.getXBATCount());
-
-
- // Fill the XBAT, which means filling 127 BATs
- for(int i=109; i<109+127; i++) {
- fs1.getFreeBlock();
- int startOffset = i*128;
- while( fs1.getBATBlockAndIndex(startOffset).getBlock().hasFreeSectors() ) {
- free = fs1.getFreeBlock();
- fs1.setNextBlock(free, POIFSConstants.END_OF_CHAIN);
- }
- assertBATCount(fs1, i+1, 1);
- }
-
- // Should now have 109+127 = 236 BATs
- assertFalse(fs1.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
- try {
- assertFalse(fs1.getBATBlockAndIndex(236 * 128).getBlock().hasFreeSectors());
- fail("Should only be 236 BATs");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
- assertBATCount(fs1, 236, 1);
-
-
- // Ask for another, will get our 2nd XBAT
- free = fs1.getFreeBlock();
- assertTrue("Had: " + free, free > 0);
-
- assertFalse(fs1.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
- assertTrue(fs1.getBATBlockAndIndex(237 * 128 - 1).getBlock().hasFreeSectors());
- try {
- assertFalse(fs1.getBATBlockAndIndex(237 * 128).getBlock().hasFreeSectors());
- fail("Should only be 237 BATs");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
-
-
- // Check the counts now
- assertBATCount(fs1, 237, 2);
-
- // Check the header
- header = writeOutAndReadHeader(fs1);
- assertNotNull(header);
-
- // Now, write it out, and read it back in again fully
- NPOIFSFileSystem fs2 = writeOutAndReadBack(fs1);
- fs1.close();
-
- // Check that it is seen correctly
- assertBATCount(fs2, 237, 2);
-
- assertFalse(fs2.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
- assertTrue(fs2.getBATBlockAndIndex(237 * 128 - 1).getBlock().hasFreeSectors());
- try {
- assertFalse(fs2.getBATBlockAndIndex(237 * 128).getBlock().hasFreeSectors());
- fail("Should only be 237 BATs");
- } catch(IndexOutOfBoundsException e) {
- // expected here
- }
-
-
- // All done
- fs2.close();
- }
-
- /**
- * Test that we can correctly get the list of directory
- * entries, and the details on the files in them
- */
- @Test
- public void listEntries() throws IOException {
- for(NPOIFSFileSystem fs : get512and4kFileAndInput()) {
- DirectoryEntry root = fs.getRoot();
- assertEquals(5, root.getEntryCount());
-
- // Check by the names
- Entry thumbnail = root.getEntry("Thumbnail");
- Entry dsi = root.getEntry("\u0005DocumentSummaryInformation");
- Entry si = root.getEntry("\u0005SummaryInformation");
- Entry image = root.getEntry("Image");
- Entry tags = root.getEntry("Tags");
-
- assertFalse(thumbnail.isDirectoryEntry());
- assertFalse(dsi.isDirectoryEntry());
- assertFalse(si.isDirectoryEntry());
- assertTrue(image.isDirectoryEntry());
- assertFalse(tags.isDirectoryEntry());
-
- // Check via the iterator
- Iterator<Entry> it = root.getEntries();
- assertEquals(thumbnail.getName(), it.next().getName());
- assertEquals(dsi.getName(), it.next().getName());
- assertEquals(si.getName(), it.next().getName());
- assertEquals(image.getName(), it.next().getName());
- assertEquals(tags.getName(), it.next().getName());
-
- // Look inside another
- DirectoryEntry imageD = (DirectoryEntry)image;
- assertEquals(7, imageD.getEntryCount());
-
- fs.close();
- }
- }
-
- /**
- * Tests that we can get the correct contents for
- * a document in the filesystem
- */
- @Test
- public void getDocumentEntry() throws Exception {
- for(NPOIFSFileSystem fs : get512and4kFileAndInput()) {
- DirectoryEntry root = fs.getRoot();
- Entry si = root.getEntry("\u0005SummaryInformation");
-
- assertTrue(si.isDocumentEntry());
- DocumentNode doc = (DocumentNode)si;
-
- // Check we can read it
- assertContentsMatches(null, doc);
-
- // Now try to build the property set
- DocumentInputStream inp = new NDocumentInputStream(doc);
- PropertySet ps = PropertySetFactory.create(inp);
- SummaryInformation inf = (SummaryInformation)ps;
-
- // Check some bits in it
- assertNull(inf.getApplicationName());
- assertNull(inf.getAuthor());
- assertNull(inf.getSubject());
- assertEquals(131333, inf.getOSVersion());
-
- // Finish with this one
- inp.close();
-
-
- // Try the other summary information
- si = root.getEntry("\u0005DocumentSummaryInformation");
- assertTrue(si.isDocumentEntry());
- doc = (DocumentNode)si;
- assertContentsMatches(null, doc);
-
- inp = new NDocumentInputStream(doc);
- ps = PropertySetFactory.create(inp);
- DocumentSummaryInformation dinf = (DocumentSummaryInformation)ps;
- assertEquals(131333, dinf.getOSVersion());
-
- fs.close();
- }
- }
-
- /**
- * Read a file, write it and read it again.
- * Then, alter+add some streams, write and read
- */
- @Test
- public void readWriteRead() throws Exception {
- SummaryInformation sinf;
- DocumentSummaryInformation dinf;
- DirectoryEntry root, testDir;
-
- for(NPOIFSFileSystem fs1 : get512and4kFileAndInput()) {
- // Check we can find the entries we expect
- root = fs1.getRoot();
- assertEquals(5, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Tags"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
-
- // Write out, re-load
- NPOIFSFileSystem fs2 = writeOutAndReadBack(fs1);
- fs1.close();
-
- // Check they're still there
- root = fs2.getRoot();
- assertEquals(5, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Tags"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
-
- // Check the contents of them - parse the summary block and check
- sinf = (SummaryInformation)PropertySetFactory.create(new NDocumentInputStream(
- (DocumentEntry)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME)));
- assertEquals(131333, sinf.getOSVersion());
-
- dinf = (DocumentSummaryInformation)PropertySetFactory.create(new NDocumentInputStream(
- (DocumentEntry)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)));
- assertEquals(131333, dinf.getOSVersion());
-
-
- // Add a test mini stream
- testDir = root.createDirectory("Testing 123");
- testDir.createDirectory("Testing 456");
- testDir.createDirectory("Testing 789");
- byte[] mini = new byte[] { 42, 0, 1, 2, 3, 4, 42 };
- testDir.createDocument("Mini", new ByteArrayInputStream(mini));
-
-
- // Write out, re-load
- NPOIFSFileSystem fs3 = writeOutAndReadBack(fs2);
- fs2.close();
-
- root = fs3.getRoot();
- testDir = (DirectoryEntry)root.getEntry("Testing 123");
- assertEquals(6, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Tags"));
- assertThat(root.getEntryNames(), hasItem("Testing 123"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
-
- // Check old and new are there
- sinf = (SummaryInformation)PropertySetFactory.create(new NDocumentInputStream(
- (DocumentEntry)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME)));
- assertEquals(131333, sinf.getOSVersion());
-
- dinf = (DocumentSummaryInformation)PropertySetFactory.create(new NDocumentInputStream(
- (DocumentEntry)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)));
- assertEquals(131333, dinf.getOSVersion());
-
- assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini"));
-
-
- // Write out and read once more, just to be sure
- NPOIFSFileSystem fs4 = writeOutAndReadBack(fs3);
- fs3.close();
-
- root = fs4.getRoot();
- testDir = (DirectoryEntry)root.getEntry("Testing 123");
- assertEquals(6, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Tags"));
- assertThat(root.getEntryNames(), hasItem("Testing 123"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
- sinf = (SummaryInformation)PropertySetFactory.create(new NDocumentInputStream(
- (DocumentEntry)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME)));
- assertEquals(131333, sinf.getOSVersion());
-
- dinf = (DocumentSummaryInformation)PropertySetFactory.create(new NDocumentInputStream(
- (DocumentEntry)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)));
- assertEquals(131333, dinf.getOSVersion());
-
- assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini"));
-
-
- // Add a full stream, delete a full stream
- byte[] main4096 = new byte[4096];
- main4096[0] = -10;
- main4096[4095] = -11;
- testDir.createDocument("Normal4096", new ByteArrayInputStream(main4096));
-
- root.getEntry("Tags").delete();
-
-
- // Write out, re-load
- NPOIFSFileSystem fs5 = writeOutAndReadBack(fs4);
- fs4.close();
-
- // Check it's all there
- root = fs5.getRoot();
- testDir = (DirectoryEntry)root.getEntry("Testing 123");
- assertEquals(5, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Testing 123"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
-
- // Check old and new are there
- sinf = (SummaryInformation)PropertySetFactory.create(new NDocumentInputStream(
- (DocumentEntry)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME)));
- assertEquals(131333, sinf.getOSVersion());
-
- dinf = (DocumentSummaryInformation)PropertySetFactory.create(new NDocumentInputStream(
- (DocumentEntry)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)));
- assertEquals(131333, dinf.getOSVersion());
-
- assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini"));
- assertContentsMatches(main4096, (DocumentEntry)testDir.getEntry("Normal4096"));
-
-
- // Delete a directory, and add one more
- testDir.getEntry("Testing 456").delete();
- testDir.createDirectory("Testing ABC");
-
-
- // Save
- NPOIFSFileSystem fs6 = writeOutAndReadBack(fs5);
- fs5.close();
-
- // Check
- root = fs6.getRoot();
- testDir = (DirectoryEntry)root.getEntry("Testing 123");
-
- assertEquals(5, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Testing 123"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
- assertEquals(4, testDir.getEntryCount());
- assertThat(testDir.getEntryNames(), hasItem("Mini"));
- assertThat(testDir.getEntryNames(), hasItem("Normal4096"));
- assertThat(testDir.getEntryNames(), hasItem("Testing 789"));
- assertThat(testDir.getEntryNames(), hasItem("Testing ABC"));
-
-
- // Add another mini stream
- byte[] mini2 = new byte[] { -42, 0, -1, -2, -3, -4, -42 };
- testDir.createDocument("Mini2", new ByteArrayInputStream(mini2));
-
- // Save, load, check
- NPOIFSFileSystem fs7 = writeOutAndReadBack(fs6);
- fs6.close();
-
- root = fs7.getRoot();
- testDir = (DirectoryEntry)root.getEntry("Testing 123");
-
- assertEquals(5, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Testing 123"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
- assertEquals(5, testDir.getEntryCount());
- assertThat(testDir.getEntryNames(), hasItem("Mini"));
- assertThat(testDir.getEntryNames(), hasItem("Mini2"));
- assertThat(testDir.getEntryNames(), hasItem("Normal4096"));
- assertThat(testDir.getEntryNames(), hasItem("Testing 789"));
- assertThat(testDir.getEntryNames(), hasItem("Testing ABC"));
-
- assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini"));
- assertContentsMatches(mini2, (DocumentEntry)testDir.getEntry("Mini2"));
- assertContentsMatches(main4096, (DocumentEntry)testDir.getEntry("Normal4096"));
-
-
- // Delete a mini stream, add one more
- testDir.getEntry("Mini").delete();
-
- byte[] mini3 = new byte[] { 42, 0, 42, 0, 42, 0, 42 };
- testDir.createDocument("Mini3", new ByteArrayInputStream(mini3));
-
-
- // Save, load, check
- NPOIFSFileSystem fs8 = writeOutAndReadBack(fs7);
- fs7.close();
-
- root = fs8.getRoot();
- testDir = (DirectoryEntry)root.getEntry("Testing 123");
-
- assertEquals(5, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Testing 123"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
- assertEquals(5, testDir.getEntryCount());
- assertThat(testDir.getEntryNames(), hasItem("Mini2"));
- assertThat(testDir.getEntryNames(), hasItem("Mini3"));
- assertThat(testDir.getEntryNames(), hasItem("Normal4096"));
- assertThat(testDir.getEntryNames(), hasItem("Testing 789"));
- assertThat(testDir.getEntryNames(), hasItem("Testing ABC"));
-
- assertContentsMatches(mini2, (DocumentEntry)testDir.getEntry("Mini2"));
- assertContentsMatches(mini3, (DocumentEntry)testDir.getEntry("Mini3"));
- assertContentsMatches(main4096, (DocumentEntry)testDir.getEntry("Normal4096"));
-
-
- // Change some existing streams
- NPOIFSDocument mini2Doc = new NPOIFSDocument((DocumentNode)testDir.getEntry("Mini2"));
- mini2Doc.replaceContents(new ByteArrayInputStream(mini));
-
- byte[] main4106 = new byte[4106];
- main4106[0] = 41;
- main4106[4105] = 42;
- NPOIFSDocument mainDoc = new NPOIFSDocument((DocumentNode)testDir.getEntry("Normal4096"));
- mainDoc.replaceContents(new ByteArrayInputStream(main4106));
-
-
- // Re-check
- NPOIFSFileSystem fs9 = writeOutAndReadBack(fs8);
- fs8.close();
-
- root = fs9.getRoot();
- testDir = (DirectoryEntry)root.getEntry("Testing 123");
-
- assertEquals(5, root.getEntryCount());
- assertThat(root.getEntryNames(), hasItem("Thumbnail"));
- assertThat(root.getEntryNames(), hasItem("Image"));
- assertThat(root.getEntryNames(), hasItem("Testing 123"));
- assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
- assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
-
- assertEquals(5, testDir.getEntryCount());
- assertThat(testDir.getEntryNames(), hasItem("Mini2"));
- assertThat(testDir.getEntryNames(), hasItem("Mini3"));
- assertThat(testDir.getEntryNames(), hasItem("Normal4096"));
- assertThat(testDir.getEntryNames(), hasItem("Testing 789"));
- assertThat(testDir.getEntryNames(), hasItem("Testing ABC"));
-
- assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini2"));
- assertContentsMatches(mini3, (DocumentEntry)testDir.getEntry("Mini3"));
- assertContentsMatches(main4106, (DocumentEntry)testDir.getEntry("Normal4096"));
-
-
- // All done
- fs9.close();
- }
- }
-
- /**
- * Create a new file, write it and read it again
- * Then, add some streams, write and read
- */
- @Test
- public void createWriteRead() throws IOException {
- NPOIFSFileSystem fs1 = new NPOIFSFileSystem();
- DocumentEntry miniDoc;
- DocumentEntry normDoc;
-
- // Initially has Properties + BAT but not SBAT
- assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(2));
-
- // Check that the SBAT is empty
- assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getRoot().getProperty().getStartBlock());
-
- // Check that properties table was given block 0
- assertEquals(0, fs1._get_property_table().getStartBlock());
-
- // Write and read it
- NPOIFSFileSystem fs2 = writeOutAndReadBack(fs1);
- fs1.close();
-
- // No change, SBAT remains empty
- assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs2.getNextBlock(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs2.getNextBlock(2));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs2.getNextBlock(3));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getRoot().getProperty().getStartBlock());
- assertEquals(0, fs2._get_property_table().getStartBlock());
- fs2.close();
-
- // Check the same but with saving to a file
- NPOIFSFileSystem fs3 = new NPOIFSFileSystem();
- NPOIFSFileSystem fs4 = writeOutFileAndReadBack(fs3);
- fs3.close();
-
- // Same, no change, SBAT remains empty
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs4.getNextBlock(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(2));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(3));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getRoot().getProperty().getStartBlock());
- assertEquals(0, fs4._get_property_table().getStartBlock());
-
-
-
- // Put everything within a new directory
- DirectoryEntry testDir = fs4.createDirectory("Test Directory");
-
- // Add a new Normal Stream (Normal Streams minimum 4096 bytes)
- byte[] main4096 = new byte[4096];
- main4096[0] = -10;
- main4096[4095] = -11;
- testDir.createDocument("Normal4096", new ByteArrayInputStream(main4096));
-
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs4.getNextBlock(1));
- assertEquals(3, fs4.getNextBlock(2));
- assertEquals(4, fs4.getNextBlock(3));
- assertEquals(5, fs4.getNextBlock(4));
- assertEquals(6, fs4.getNextBlock(5));
- assertEquals(7, fs4.getNextBlock(6));
- assertEquals(8, fs4.getNextBlock(7));
- assertEquals(9, fs4.getNextBlock(8));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(9));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(10));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(11));
- // SBAT still unused
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getRoot().getProperty().getStartBlock());
-
-
- // Add a bigger Normal Stream
- byte[] main5124 = new byte[5124];
- main5124[0] = -22;
- main5124[5123] = -33;
- testDir.createDocument("Normal5124", new ByteArrayInputStream(main5124));
-
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs4.getNextBlock(1));
- assertEquals(3, fs4.getNextBlock(2));
- assertEquals(4, fs4.getNextBlock(3));
- assertEquals(5, fs4.getNextBlock(4));
- assertEquals(6, fs4.getNextBlock(5));
- assertEquals(7, fs4.getNextBlock(6));
- assertEquals(8, fs4.getNextBlock(7));
- assertEquals(9, fs4.getNextBlock(8));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(9));
-
- assertEquals(11, fs4.getNextBlock(10));
- assertEquals(12, fs4.getNextBlock(11));
- assertEquals(13, fs4.getNextBlock(12));
- assertEquals(14, fs4.getNextBlock(13));
- assertEquals(15, fs4.getNextBlock(14));
- assertEquals(16, fs4.getNextBlock(15));
- assertEquals(17, fs4.getNextBlock(16));
- assertEquals(18, fs4.getNextBlock(17));
- assertEquals(19, fs4.getNextBlock(18));
- assertEquals(20, fs4.getNextBlock(19));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(20));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(21));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(22));
-
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getRoot().getProperty().getStartBlock());
-
-
- // Now Add a mini stream
- byte[] mini = new byte[] { 42, 0, 1, 2, 3, 4, 42 };
- testDir.createDocument("Mini", new ByteArrayInputStream(mini));
-
- // Mini stream will get one block for fat + one block for data
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs4.getNextBlock(1));
- assertEquals(3, fs4.getNextBlock(2));
- assertEquals(4, fs4.getNextBlock(3));
- assertEquals(5, fs4.getNextBlock(4));
- assertEquals(6, fs4.getNextBlock(5));
- assertEquals(7, fs4.getNextBlock(6));
- assertEquals(8, fs4.getNextBlock(7));
- assertEquals(9, fs4.getNextBlock(8));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(9));
-
- assertEquals(11, fs4.getNextBlock(10));
- assertEquals(12, fs4.getNextBlock(11));
- assertEquals(13, fs4.getNextBlock(12));
- assertEquals(14, fs4.getNextBlock(13));
- assertEquals(15, fs4.getNextBlock(14));
- assertEquals(16, fs4.getNextBlock(15));
- assertEquals(17, fs4.getNextBlock(16));
- assertEquals(18, fs4.getNextBlock(17));
- assertEquals(19, fs4.getNextBlock(18));
- assertEquals(20, fs4.getNextBlock(19));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(20));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(21));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(22));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(23));
-
- // Check the mini stream location was set
- // (21 is mini fat, 22 is first mini stream block)
- assertEquals(22, fs4.getRoot().getProperty().getStartBlock());
-
-
- // Write and read back
- NPOIFSFileSystem fs5 = writeOutAndReadBack(fs4);
- fs4.close();
- HeaderBlock header = writeOutAndReadHeader(fs5);
-
- // Check the header has the right pointers in it
- assertEquals(1, header.getBATCount());
- assertEquals(1, header.getBATArray()[0]);
- assertEquals(0, header.getPropertyStart());
- assertEquals(1, header.getSBATCount());
- assertEquals(21, header.getSBATStart());
- assertEquals(22, fs5._get_property_table().getRoot().getStartBlock());
-
- // Block use should be almost the same, except that the properties
- // stream will have grown out to cover 2 blocks
- // (the rest of the block use is unchanged)
- assertEquals(23, fs5.getNextBlock(0)); // Properties now extends over 2 blocks
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs5.getNextBlock(1));
-
- assertEquals(3, fs5.getNextBlock(2));
- assertEquals(4, fs5.getNextBlock(3));
- assertEquals(5, fs5.getNextBlock(4));
- assertEquals(6, fs5.getNextBlock(5));
- assertEquals(7, fs5.getNextBlock(6));
- assertEquals(8, fs5.getNextBlock(7));
- assertEquals(9, fs5.getNextBlock(8));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(9)); // End of normal4096
-
- assertEquals(11, fs5.getNextBlock(10));
- assertEquals(12, fs5.getNextBlock(11));
- assertEquals(13, fs5.getNextBlock(12));
- assertEquals(14, fs5.getNextBlock(13));
- assertEquals(15, fs5.getNextBlock(14));
- assertEquals(16, fs5.getNextBlock(15));
- assertEquals(17, fs5.getNextBlock(16));
- assertEquals(18, fs5.getNextBlock(17));
- assertEquals(19, fs5.getNextBlock(18));
- assertEquals(20, fs5.getNextBlock(19));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(20)); // End of normal5124
-
- assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(21)); // Mini Stream FAT
- assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(22)); // Mini Stream data
- assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(23)); // Properties #2
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs5.getNextBlock(24));
-
-
- // Check some data
- assertEquals(1, fs5.getRoot().getEntryCount());
- testDir = (DirectoryEntry)fs5.getRoot().getEntry("Test Directory");
- assertEquals(3, testDir.getEntryCount());
-
- miniDoc = (DocumentEntry)testDir.getEntry("Mini");
- assertContentsMatches(mini, miniDoc);
-
- normDoc = (DocumentEntry)testDir.getEntry("Normal4096");
- assertContentsMatches(main4096, normDoc);
-
- normDoc = (DocumentEntry)testDir.getEntry("Normal5124");
- assertContentsMatches(main5124, normDoc);
-
-
- // Delete a couple of streams
- miniDoc.delete();
- normDoc.delete();
-
-
- // Check - will have un-used sectors now
- NPOIFSFileSystem fs6 = writeOutAndReadBack(fs5);
- fs5.close();
-
- assertEquals(POIFSConstants.END_OF_CHAIN, fs6.getNextBlock(0)); // Props back in 1 block
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs6.getNextBlock(1));
-
- assertEquals(3, fs6.getNextBlock(2));
- assertEquals(4, fs6.getNextBlock(3));
- assertEquals(5, fs6.getNextBlock(4));
- assertEquals(6, fs6.getNextBlock(5));
- assertEquals(7, fs6.getNextBlock(6));
- assertEquals(8, fs6.getNextBlock(7));
- assertEquals(9, fs6.getNextBlock(8));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs6.getNextBlock(9)); // End of normal4096
-
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(10));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(11));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(12));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(13));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(14));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(15));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(16));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(17));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(18));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(19));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(20));
-
- assertEquals(POIFSConstants.END_OF_CHAIN, fs6.getNextBlock(21)); // Mini Stream FAT
- assertEquals(POIFSConstants.END_OF_CHAIN, fs6.getNextBlock(22)); // Mini Stream data
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(23)); // Properties gone
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(24));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(25));
-
- // All done
- fs6.close();
- }
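For reference, the round trip this test verifies block by block looks like the following at the user level, written against the unified POIFSFileSystem API this change migrates to. This is a minimal sketch only: the target file is an arbitrary example, and it reuses the imports already present in this test class.

    private static void roundTripSketch(File target) throws IOException {
        // Create a filesystem with one normal-sized stream and write it out
        try (POIFSFileSystem fs = new POIFSFileSystem()) {
            DirectoryEntry dir = fs.createDirectory("Test Directory");
            dir.createDocument("Normal4096", new ByteArrayInputStream(new byte[4096]));
            try (OutputStream out = new FileOutputStream(target)) {
                fs.writeFilesystem(out);
            }
        }
        // Read it back and check the stream survived the round trip
        try (POIFSFileSystem fs = new POIFSFileSystem(target)) {
            DirectoryEntry dir = (DirectoryEntry) fs.getRoot().getEntry("Test Directory");
            DocumentEntry doc = (DocumentEntry) dir.getEntry("Normal4096");
            assertEquals(4096, doc.getSize());
        }
    }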
-
- @Test
- public void addBeforeWrite() throws IOException {
- NPOIFSFileSystem fs1 = new NPOIFSFileSystem();
- DocumentEntry miniDoc;
- DocumentEntry normDoc;
- HeaderBlock hdr;
-
- // Initially has Properties + BAT but nothing else
- assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(2));
-
- hdr = writeOutAndReadHeader(fs1);
- // No mini stream, and no xbats
- // Will have fat then properties stream
- assertEquals(1, hdr.getBATCount());
- assertEquals(1, hdr.getBATArray()[0]);
- assertEquals(0, hdr.getPropertyStart());
- assertEquals(POIFSConstants.END_OF_CHAIN, hdr.getSBATStart());
- assertEquals(POIFSConstants.END_OF_CHAIN, hdr.getXBATIndex());
- assertEquals(POIFSConstants.SMALLER_BIG_BLOCK_SIZE*3, fs1.size());
- fs1.close();
-
- // Get a clean filesystem to start with
- fs1 = new NPOIFSFileSystem();
-
- // Put our test files in a non-standard place
- DirectoryEntry parentDir = fs1.createDirectory("Parent Directory");
- DirectoryEntry testDir = parentDir.createDirectory("Test Directory");
-
-
- // Add to the mini stream
- byte[] mini = new byte[] { 42, 0, 1, 2, 3, 4, 42 };
- testDir.createDocument("Mini", new ByteArrayInputStream(mini));
-
- // Add to the main stream
- byte[] main4096 = new byte[4096];
- main4096[0] = -10;
- main4096[4095] = -11;
- testDir.createDocument("Normal4096", new ByteArrayInputStream(main4096));
-
-
- // Check the mini stream was added, then the main stream
- assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(2)); // Mini Fat
- assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(3)); // Mini Stream
- assertEquals(5, fs1.getNextBlock(4)); // Main Stream
- assertEquals(6, fs1.getNextBlock(5));
- assertEquals(7, fs1.getNextBlock(6));
- assertEquals(8, fs1.getNextBlock(7));
- assertEquals(9, fs1.getNextBlock(8));
- assertEquals(10, fs1.getNextBlock(9));
- assertEquals(11, fs1.getNextBlock(10));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(11));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(12));
- assertEquals(POIFSConstants.SMALLER_BIG_BLOCK_SIZE*13, fs1.size());
-
-
- // Check that we can read the right data pre-write
- miniDoc = (DocumentEntry)testDir.getEntry("Mini");
- assertContentsMatches(mini, miniDoc);
-
- normDoc = (DocumentEntry)testDir.getEntry("Normal4096");
- assertContentsMatches(main4096, normDoc);
-
-
- // Write, read, check
- hdr = writeOutAndReadHeader(fs1);
- NPOIFSFileSystem fs2 = writeOutAndReadBack(fs1);
- fs1.close();
-
- // Check the header details - will have the sbat near the start,
- // then the properties at the end
- assertEquals(1, hdr.getBATCount());
- assertEquals(1, hdr.getBATArray()[0]);
- assertEquals(2, hdr.getSBATStart());
- assertEquals(0, hdr.getPropertyStart());
- assertEquals(POIFSConstants.END_OF_CHAIN, hdr.getXBATIndex());
-
- // Check the block allocation is unchanged, other than
- // the properties stream going in at the end
- assertEquals(12, fs2.getNextBlock(0)); // Properties
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs2.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(2));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(3));
- assertEquals(5, fs2.getNextBlock(4));
- assertEquals(6, fs2.getNextBlock(5));
- assertEquals(7, fs2.getNextBlock(6));
- assertEquals(8, fs2.getNextBlock(7));
- assertEquals(9, fs2.getNextBlock(8));
- assertEquals(10, fs2.getNextBlock(9));
- assertEquals(11, fs2.getNextBlock(10));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(11));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(12));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs2.getNextBlock(13));
- assertEquals(POIFSConstants.SMALLER_BIG_BLOCK_SIZE*14, fs2.size());
-
-
- // Check the data
- DirectoryEntry fsRoot = fs2.getRoot();
- assertEquals(1, fsRoot.getEntryCount());
-
- parentDir = (DirectoryEntry)fsRoot.getEntry("Parent Directory");
- assertEquals(1, parentDir.getEntryCount());
-
- testDir = (DirectoryEntry)parentDir.getEntry("Test Directory");
- assertEquals(2, testDir.getEntryCount());
-
- miniDoc = (DocumentEntry)testDir.getEntry("Mini");
- assertContentsMatches(mini, miniDoc);
-
- normDoc = (DocumentEntry)testDir.getEntry("Normal4096");
- assertContentsMatches(main4096, normDoc);
-
-
- // Add one more stream to each, then save and re-load
- byte[] mini2 = new byte[] { -42, 0, -1, -2, -3, -4, -42 };
- testDir.createDocument("Mini2", new ByteArrayInputStream(mini2));
-
- // Add to the main stream
- byte[] main4106 = new byte[4106];
- main4106[0] = 41;
- main4106[4105] = 42;
- testDir.createDocument("Normal4106", new ByteArrayInputStream(main4106));
-
-
- // Recheck the data in all 4 streams
- NPOIFSFileSystem fs3 = writeOutAndReadBack(fs2);
- fs2.close();
-
- fsRoot = fs3.getRoot();
- assertEquals(1, fsRoot.getEntryCount());
-
- parentDir = (DirectoryEntry)fsRoot.getEntry("Parent Directory");
- assertEquals(1, parentDir.getEntryCount());
-
- testDir = (DirectoryEntry)parentDir.getEntry("Test Directory");
- assertEquals(4, testDir.getEntryCount());
-
- miniDoc = (DocumentEntry)testDir.getEntry("Mini");
- assertContentsMatches(mini, miniDoc);
-
- miniDoc = (DocumentEntry)testDir.getEntry("Mini2");
- assertContentsMatches(mini2, miniDoc);
-
- normDoc = (DocumentEntry)testDir.getEntry("Normal4106");
- assertContentsMatches(main4106, normDoc);
-
- // All done
- fs3.close();
- }
-
- @Test
- public void readZeroLengthEntries() throws IOException {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("only-zero-byte-streams.ole2"));
- DirectoryNode testDir = fs.getRoot();
- assertEquals(3, testDir.getEntryCount());
- DocumentEntry entry;
-
- entry = (DocumentEntry)testDir.getEntry("test-zero-1");
- assertNotNull(entry);
- assertEquals(0, entry.getSize());
-
- entry = (DocumentEntry)testDir.getEntry("test-zero-2");
- assertNotNull(entry);
- assertEquals(0, entry.getSize());
-
- entry = (DocumentEntry)testDir.getEntry("test-zero-3");
- assertNotNull(entry);
- assertEquals(0, entry.getSize());
-
- // Check properties, all have zero length, no blocks
- NPropertyTable props = fs._get_property_table();
- assertEquals(POIFSConstants.END_OF_CHAIN, props.getRoot().getStartBlock());
- for (Property prop : props.getRoot()) {
- assertEquals("test-zero-", prop.getName().substring(0, 10));
- assertEquals(POIFSConstants.END_OF_CHAIN, prop.getStartBlock());
- }
-
- // All done
- fs.close();
- }
-
- @Test
- public void writeZeroLengthEntries() throws IOException {
- NPOIFSFileSystem fs1 = new NPOIFSFileSystem();
- DirectoryNode testDir = fs1.getRoot();
- DocumentEntry miniDoc;
- DocumentEntry normDoc;
- DocumentEntry emptyDoc;
-
- // Add mini and normal sized entries to start
- byte[] mini2 = new byte[] { -42, 0, -1, -2, -3, -4, -42 };
- testDir.createDocument("Mini2", new ByteArrayInputStream(mini2));
-
- // Add to the main stream
- byte[] main4106 = new byte[4106];
- main4106[0] = 41;
- main4106[4105] = 42;
- testDir.createDocument("Normal4106", new ByteArrayInputStream(main4106));
-
- // Now add some empty ones
- byte[] empty = new byte[0];
- testDir.createDocument("empty-1", new ByteArrayInputStream(empty));
- testDir.createDocument("empty-2", new ByteArrayInputStream(empty));
- testDir.createDocument("empty-3", new ByteArrayInputStream(empty));
-
- // Check
- miniDoc = (DocumentEntry)testDir.getEntry("Mini2");
- assertContentsMatches(mini2, miniDoc);
-
- normDoc = (DocumentEntry)testDir.getEntry("Normal4106");
- assertContentsMatches(main4106, normDoc);
-
- emptyDoc = (DocumentEntry)testDir.getEntry("empty-1");
- assertContentsMatches(empty, emptyDoc);
-
- emptyDoc = (DocumentEntry)testDir.getEntry("empty-2");
- assertContentsMatches(empty, emptyDoc);
-
- emptyDoc = (DocumentEntry)testDir.getEntry("empty-3");
- assertContentsMatches(empty, emptyDoc);
-
- // Look at the properties entry, and check the empty ones
- // have zero size and no start block
- NPropertyTable props = fs1._get_property_table();
- Iterator<Property> propsIt = props.getRoot().getChildren();
-
- Property prop = propsIt.next();
- assertEquals("Mini2", prop.getName());
- assertEquals(0, prop.getStartBlock());
- assertEquals(7, prop.getSize());
-
- prop = propsIt.next();
- assertEquals("Normal4106", prop.getName());
- assertEquals(4, prop.getStartBlock()); // BAT, Props, SBAT, Mini
- assertEquals(4106, prop.getSize());
-
- prop = propsIt.next();
- assertEquals("empty-1", prop.getName());
- assertEquals(POIFSConstants.END_OF_CHAIN, prop.getStartBlock());
- assertEquals(0, prop.getSize());
-
- prop = propsIt.next();
- assertEquals("empty-2", prop.getName());
- assertEquals(POIFSConstants.END_OF_CHAIN, prop.getStartBlock());
- assertEquals(0, prop.getSize());
-
- prop = propsIt.next();
- assertEquals("empty-3", prop.getName());
- assertEquals(POIFSConstants.END_OF_CHAIN, prop.getStartBlock());
- assertEquals(0, prop.getSize());
-
-
- // Save and re-check
- NPOIFSFileSystem fs2 = writeOutAndReadBack(fs1);
- fs1.close();
- testDir = fs2.getRoot();
-
- miniDoc = (DocumentEntry)testDir.getEntry("Mini2");
- assertContentsMatches(mini2, miniDoc);
-
- normDoc = (DocumentEntry)testDir.getEntry("Normal4106");
- assertContentsMatches(main4106, normDoc);
-
- emptyDoc = (DocumentEntry)testDir.getEntry("empty-1");
- assertContentsMatches(empty, emptyDoc);
-
- emptyDoc = (DocumentEntry)testDir.getEntry("empty-2");
- assertContentsMatches(empty, emptyDoc);
-
- emptyDoc = (DocumentEntry)testDir.getEntry("empty-3");
- assertContentsMatches(empty, emptyDoc);
-
- // Check that a mini-stream was assigned, with one block used
- assertEquals(3, testDir.getProperty().getStartBlock());
- assertEquals(64, testDir.getProperty().getSize());
-
- // All done
- fs2.close();
- }
-
- /**
- * Test that we can read a file with NPOIFS, create a new NPOIFS instance,
- * write it out, read it with POIFS, and see the original data
- */
- @Test
- public void NPOIFSReadCopyWritePOIFSRead() throws IOException {
- File testFile = POIDataSamples.getSpreadSheetInstance().getFile("Simple.xls");
- NPOIFSFileSystem src = new NPOIFSFileSystem(testFile);
- byte wbDataExp[] = IOUtils.toByteArray(src.createDocumentInputStream("Workbook"));
-
- NPOIFSFileSystem nfs = new NPOIFSFileSystem();
- EntryUtils.copyNodes(src.getRoot(), nfs.getRoot());
- src.close();
-
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- nfs.writeFilesystem(bos);
- nfs.close();
-
- POIFSFileSystem pfs = new POIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()));
- byte wbDataAct[] = IOUtils.toByteArray(pfs.createDocumentInputStream("Workbook"));
-
- assertThat(wbDataExp, equalTo(wbDataAct));
- pfs.close();
- }
-
- /**
- * Ensure that you can recursively delete directories and their
- * contents
- */
- @Test
- public void RecursiveDelete() throws IOException {
- File testFile = POIDataSamples.getSpreadSheetInstance().getFile("SimpleMacro.xls");
- NPOIFSFileSystem src = new NPOIFSFileSystem(testFile);
-
- // Starts out with 5 entries:
- // _VBA_PROJECT_CUR
- // SummaryInformation <(0x05)SummaryInformation>
- // DocumentSummaryInformation <(0x05)DocumentSummaryInformation>
- // Workbook
- // CompObj <(0x01)CompObj>
- assertEquals(5, _countChildren(src._get_property_table().getRoot()));
- assertEquals(5, src.getRoot().getEntryCount());
-
- // Grab the VBA project root
- DirectoryEntry vbaProj = (DirectoryEntry)src.getRoot().getEntry("_VBA_PROJECT_CUR");
- assertEquals(3, vbaProj.getEntryCount());
- // Can't delete yet, has stuff
- assertFalse(vbaProj.delete());
- // Recursively delete
- _recursiveDelete(vbaProj);
-
- // Entries gone
- assertEquals(4, _countChildren(src._get_property_table().getRoot()));
- assertEquals(4, src.getRoot().getEntryCount());
-
- // Done
- src.close();
- }
- private void _recursiveDelete(Entry entry) throws IOException {
- if (entry.isDocumentEntry()) {
- assertTrue(entry.delete());
- return;
- }
-
- DirectoryEntry dir = (DirectoryEntry)entry;
- String[] names = dir.getEntryNames().toArray(new String[dir.getEntryCount()]);
- for (String name : names) {
- Entry ce = dir.getEntry(name);
- _recursiveDelete(ce);
- }
- assertTrue(dir.delete());
- }
- @SuppressWarnings("unused")
- private int _countChildren(DirectoryProperty p) {
- int count = 0;
- for (Property cp : p) { count++; }
- return count;
- }
-
- /**
- * Ensures that we can create a file >2gb in size, as well as
- * extend existing files past the 2gb boundary.
- *
- * Note that to run this test, you will require 2.5+gb of free
- * space on your TMP/TEMP partition/disk
- *
- * Note that to run this test, you need to be able to mmap 2.5+gb
- * files, which may need bigger kernel.shmmax and vm.max_map_count
- * settings on Linux.
- *
- * TODO Fix this to work...
- */
- @Test
- @Ignore("Work in progress test for #60670")
- public void CreationAndExtensionPast2GB() throws Exception {
- File big = TempFile.createTempFile("poi-test-", ".ole2");
- Assume.assumeTrue("2.5gb of free space is required on your tmp/temp " +
- "partition/disk to run large file tests",
- big.getFreeSpace() > 2.5*1024*1024*1024);
- System.out.println("Slow, memory heavy test in progress....");
-
- int s100mb = 100*1024*1024;
- int s512mb = 512*1024*1024;
- long s2gb = 2L *1024*1024*1024;
- DocumentEntry entry;
- NPOIFSFileSystem fs;
-
- // Create a just-sub 2gb file
- fs = POIFSFileSystem.create(big);
- for (int i=0; i<19; i++) {
- fs.createDocument(new DummyDataInputStream(s100mb), "Entry"+i);
- }
- fs.writeFilesystem();
- fs.close();
-
- // Extend it past the 2gb mark
- fs = new NPOIFSFileSystem(big, false);
- for (int i=0; i<19; i++) {
- entry = (DocumentEntry)fs.getRoot().getEntry("Entry"+i);
- assertNotNull(entry);
- assertEquals(s100mb, entry.getSize());
- }
-
- fs.createDocument(new DummyDataInputStream(s512mb), "Bigger");
- fs.writeFilesystem();
- fs.close();
-
- // Check it still works
- fs = new NPOIFSFileSystem(big, false);
- for (int i=0; i<19; i++) {
- entry = (DocumentEntry)fs.getRoot().getEntry("Entry"+i);
- assertNotNull(entry);
- assertEquals(s100mb, entry.getSize());
- }
- entry = (DocumentEntry)fs.getRoot().getEntry("Bigger");
- assertNotNull(entry);
- assertEquals(s512mb, entry.getSize());
-
- // Tidy
- fs.close();
- assertTrue(big.delete());
-
-
- // Create a >2gb file
- fs = POIFSFileSystem.create(big);
- for (int i=0; i<4; i++) {
- fs.createDocument(new DummyDataInputStream(s512mb), "Entry"+i);
- }
- fs.writeFilesystem();
- fs.close();
-
- // Read it
- fs = new NPOIFSFileSystem(big, false);
- for (int i=0; i<4; i++) {
- entry = (DocumentEntry)fs.getRoot().getEntry("Entry"+i);
- assertNotNull(entry);
- assertEquals(s512mb, entry.getSize());
- }
-
- // Extend it
- fs.createDocument(new DummyDataInputStream(s512mb), "Entry4");
- fs.writeFilesystem();
- fs.close();
-
- // Check it worked
- fs = new NPOIFSFileSystem(big, false);
- for (int i=0; i<5; i++) {
- entry = (DocumentEntry)fs.getRoot().getEntry("Entry"+i);
- assertNotNull(entry);
- assertEquals(s512mb, entry.getSize());
- }
-
- // Tidy
- fs.close();
- assertTrue(big.delete());
-
- // Create a file with a 2gb entry
- fs = POIFSFileSystem.create(big);
- fs.createDocument(new DummyDataInputStream(s100mb), "Small");
- // TODO Check we get a helpful error about the max size
- fs.createDocument(new DummyDataInputStream(s2gb), "Big");
- }
-
- private static final class DummyDataInputStream extends InputStream {
- private final long maxSize;
- private long size;
- private DummyDataInputStream(long maxSize) {
- this.maxSize = maxSize;
- this.size = 0;
- }
-
- public int read() {
- if (size >= maxSize) return -1;
- size++;
- return (int)(size % 128);
- }
-
- public int read(byte[] b) {
- return read(b, 0, b.length);
- }
- public int read(byte[] b, int offset, int len) {
- if (size >= maxSize) return -1;
- int sz = (int)Math.min(len, maxSize-size);
- for (int i=0; i<sz; i++) {
- b[i+offset] = (byte)((size+i) % 128);
- }
- size += sz;
- return sz;
- }
- }
-
- @Ignore("Takes a long time to run")
- @Test
- public void testPerformance() throws Exception {
- int iterations = 200;//1_000;
-
- System.out.println("OPOI:");
- long start = System.currentTimeMillis();
-
- for (int i = 0; i < iterations; i++) {
- try (InputStream inputStream = POIDataSamples.getHSMFInstance().openResourceAsStream("lots-of-recipients.msg")) {
- NPOIFSFileSystem srcFileSystem = new NPOIFSFileSystem(inputStream);
- NPOIFSFileSystem destFileSystem = new NPOIFSFileSystem();
-
- copyAllEntries(srcFileSystem.getRoot(), destFileSystem.getRoot());
-
- File file = File.createTempFile("opoi", ".dat");
- try (OutputStream outputStream = new FileOutputStream(file)) {
- destFileSystem.writeFilesystem(outputStream);
- }
-
- assertTrue(file.delete());
- if (i % 10 == 0) System.out.print(".");
- }
- }
-
- System.out.println();
- System.out.println("OPOI took: " + (System.currentTimeMillis() - start));
-
-
- System.out.println();
- System.out.println("NPOI:");
- start = System.currentTimeMillis();
-
- for (int i = 0; i < iterations; i++) {
-
- try (InputStream inputStream = POIDataSamples.getHSMFInstance().openResourceAsStream("lots-of-recipients.msg")) {
- NPOIFSFileSystem srcFileSystem = new NPOIFSFileSystem(inputStream);
- NPOIFSFileSystem destFileSystem = new NPOIFSFileSystem();
-
- copyAllEntries(srcFileSystem.getRoot(), destFileSystem.getRoot());
-
- File file = File.createTempFile("npoi", ".dat");
- try (OutputStream outputStream = new FileOutputStream(file)) {
- destFileSystem.writeFilesystem(outputStream);
- }
-
- assertTrue(file.delete());
- if (i % 10 == 0) System.out.print(".");
- }
- }
-
- System.out.println();
- System.out.println("NPOI took: " + (System.currentTimeMillis() - start));
-
- System.out.println();
- System.out.println();
- }
-
- private static void copyAllEntries(DirectoryEntry srcDirectory, DirectoryEntry destDirectory) throws IOException {
- Iterator<Entry> iterator = srcDirectory.getEntries();
-
- while (iterator.hasNext()) {
- Entry entry = iterator.next();
-
- if (entry.isDirectoryEntry()) {
- DirectoryEntry childDest = destDirectory.createDirectory(entry.getName());
- copyAllEntries((DirectoryEntry) entry, childDest);
-
- } else {
- DocumentEntry srcEntry = (DocumentEntry) entry;
-
- try (InputStream inputStream = new DocumentInputStream(srcEntry)) {
- destDirectory.createDocument(entry.getName(), inputStream);
- }
- }
- }
- }
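The hand-rolled copyAllEntries helper above mirrors what EntryUtils.copyNodes does (the same call the NPOIFSReadCopyWritePOIFSRead test relies on). As a rough, illustrative sketch, the copy step in the performance loops could equally be written as:

    private static void copyViaEntryUtils(NPOIFSFileSystem src, NPOIFSFileSystem dest) throws IOException {
        // Recursively copies every directory and document below one root into the other
        EntryUtils.copyNodes(src.getRoot(), dest.getRoot());
    }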
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.filesystem;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-
-import org.apache.poi.POIDataSamples;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.IOUtils;
-import org.junit.Test;
-
-/**
- * Tests for the Mini Store in the NIO POIFS
- */
-public final class TestNPOIFSMiniStore {
- private static final POIDataSamples _inst = POIDataSamples.getPOIFSInstance();
-
- /**
- * Check that for a given mini block, we can correctly figure
- * out what the next one is
- */
- @Test
- public void testNextBlock() throws Exception {
- // It's the same on 512 byte and 4096 byte block files!
- NPOIFSFileSystem fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
- NPOIFSFileSystem fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- NPOIFSFileSystem fsC = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
- NPOIFSFileSystem fsD = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB,fsC,fsD}) {
- NPOIFSMiniStore ministore = fs.getMiniStore();
-
- // 0 -> 50 is one stream
- for(int i=0; i<50; i++) {
- assertEquals(i+1, ministore.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(50));
-
- // 51 -> 103 is the next
- for(int i=51; i<103; i++) {
- assertEquals(i+1, ministore.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(103));
-
- // Then there are 3 one block ones
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(104));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(105));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(106));
-
- // 107 -> 154 is the next
- for(int i=107; i<154; i++) {
- assertEquals(i+1, ministore.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(154));
-
- // 155 -> 160 is the next
- for(int i=155; i<160; i++) {
- assertEquals(i+1, ministore.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(160));
-
- // 161 -> 166 is the next
- for(int i=161; i<166; i++) {
- assertEquals(i+1, ministore.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(166));
-
- // 167 -> 172 is the next
- for(int i=167; i<172; i++) {
- assertEquals(i+1, ministore.getNextBlock(i));
- }
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(172));
-
- // Now some short ones
- assertEquals(174 , ministore.getNextBlock(173));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(174));
-
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(175));
-
- assertEquals(177 , ministore.getNextBlock(176));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(177));
-
- assertEquals(179 , ministore.getNextBlock(178));
- assertEquals(180 , ministore.getNextBlock(179));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
-
- // 181 onwards is free
- for(int i=181; i<fs.getBigBlockSizeDetails().getBATEntriesPerBlock(); i++) {
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(i));
- }
- }
- fsD.close();
- fsC.close();
- fsB.close();
- fsA.close();
- }
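The assertions above spell the chains out block by block; as a minimal sketch, the same walk can be expressed as a loop that follows getNextBlock() until END_OF_CHAIN. countMiniChain below is an illustrative helper, not part of the original test; for the first chain checked above (blocks 0 through 50) it would return 51.

    private static int countMiniChain(NPOIFSMiniStore ministore, int startBlock) {
        // Follow the SBAT chain from startBlock and count the mini blocks in it
        int count = 0;
        int block = startBlock;
        while (block != POIFSConstants.END_OF_CHAIN) {
            count++;
            block = ministore.getNextBlock(block);
        }
        return count;
    }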
-
- /**
- * Check we get the right data back for each block
- */
- @Test
- public void testGetBlock() throws Exception {
- // It's the same on 512 byte and 4096 byte block files!
- NPOIFSFileSystem fsA = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
- NPOIFSFileSystem fsB = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- NPOIFSFileSystem fsC = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
- NPOIFSFileSystem fsD = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
- for(NPOIFSFileSystem fs : new NPOIFSFileSystem[] {fsA,fsB,fsC,fsD}) {
- // Mini stream should be at big block zero
- assertEquals(0, fs._get_property_table().getRoot().getStartBlock());
-
- // Grab the ministore
- NPOIFSMiniStore ministore = fs.getMiniStore();
- ByteBuffer b;
-
- // Runs from the start of the data section in 64 byte chunks
- b = ministore.getBlockAt(0);
- assertEquals((byte)0x9e, b.get());
- assertEquals((byte)0x75, b.get());
- assertEquals((byte)0x97, b.get());
- assertEquals((byte)0xf6, b.get());
- assertEquals((byte)0xff, b.get());
- assertEquals((byte)0x21, b.get());
- assertEquals((byte)0xd2, b.get());
- assertEquals((byte)0x11, b.get());
-
- // And the next block
- b = ministore.getBlockAt(1);
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x03, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x12, b.get());
- assertEquals((byte)0x02, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
-
- // Check the last data block
- b = ministore.getBlockAt(180);
- assertEquals((byte)0x30, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x80, b.get());
-
- // And the rest until the end of the big block is zeros
- for(int i=181; i<184; i++) {
- b = ministore.getBlockAt(i);
- assertEquals((byte)0, b.get());
- assertEquals((byte)0, b.get());
- assertEquals((byte)0, b.get());
- assertEquals((byte)0, b.get());
- assertEquals((byte)0, b.get());
- assertEquals((byte)0, b.get());
- assertEquals((byte)0, b.get());
- assertEquals((byte)0, b.get());
- }
- }
- fsD.close();
- fsC.close();
- fsB.close();
- fsA.close();
- }
-
- /**
- * Ask for free blocks where there are some already
- * to be had from the SFAT
- */
- @Test
- public void testGetFreeBlockWithSpare() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
- NPOIFSMiniStore ministore = fs.getMiniStore();
-
- // Our 2nd SBAT block has spares
- assertEquals(false, ministore.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
- assertEquals(true, ministore.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
-
- // First free one at 181
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(181));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(182));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(183));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(184));
-
- // Ask, will get 181
- assertEquals(181, ministore.getFreeBlock());
-
- // Ask again, will still get 181 as not written to
- assertEquals(181, ministore.getFreeBlock());
-
- // Allocate it, then ask again
- ministore.setNextBlock(181, POIFSConstants.END_OF_CHAIN);
- assertEquals(182, ministore.getFreeBlock());
-
- fs.close();
- }
-
- /**
- * Ask for free blocks where no free ones exist, and so the
- * stream needs to be extended and another SBAT added
- */
- @Test
- public void testGetFreeBlockWithNoneSpare() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- NPOIFSMiniStore ministore = fs.getMiniStore();
-
- // We have spare blocks from 181 to 255
- for(int i=181; i<256; i++) {
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(i));
- }
-
- // Check our SBAT free stuff is correct
- assertEquals(false, ministore.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
- assertEquals(true, ministore.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
-
- // Allocate all the spare ones
- for(int i=181; i<256; i++) {
- ministore.setNextBlock(i, POIFSConstants.END_OF_CHAIN);
- }
-
- // SBATs are now full, but there are only the two
- assertEquals(false, ministore.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
- assertEquals(false, ministore.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
- try {
- assertEquals(false, ministore.getBATBlockAndIndex(256).getBlock().hasFreeSectors());
- fail("Should only be two SBATs");
- } catch(IndexOutOfBoundsException e) {}
-
- // Now ask for a free one, will need to extend the SBAT chain
- assertEquals(256, ministore.getFreeBlock());
-
- assertEquals(false, ministore.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
- assertEquals(false, ministore.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
- assertEquals(true, ministore.getBATBlockAndIndex(256).getBlock().hasFreeSectors());
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(254)); // 2nd SBAT
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(255)); // 2nd SBAT
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(256)); // 3rd SBAT
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(257)); // 3rd SBAT
-
- fs.close();
- }
-
- /**
- * Test that we will extend the underlying chain of
- * big blocks that make up the ministream as needed
- */
- @Test
- public void testCreateBlockIfNeeded() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- NPOIFSMiniStore ministore = fs.getMiniStore();
-
- // 178 -> 179 -> 180, 181+ is free
- assertEquals(179 , ministore.getNextBlock(178));
- assertEquals(180 , ministore.getNextBlock(179));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
- for(int i=181; i<256; i++) {
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(i));
- }
-
- // However, the ministore data only covers blocks to 183
- for(int i=0; i<=183; i++) {
- ministore.getBlockAt(i);
- }
- try {
- ministore.getBlockAt(184);
- fail("No block at 184");
- } catch(IndexOutOfBoundsException e) {}
-
- // The ministore itself is made up of 23 big blocks
- Iterator<ByteBuffer> it = new NPOIFSStream(fs, fs.getRoot().getProperty().getStartBlock()).getBlockIterator();
- int count = 0;
- while(it.hasNext()) {
- count++;
- it.next();
- }
- assertEquals(23, count);
-
- // Ask it to get block 184, creating it as needed
- ministore.createBlockIfNeeded(184);
-
- // The ministore should be one big block bigger now
- it = new NPOIFSStream(fs, fs.getRoot().getProperty().getStartBlock()).getBlockIterator();
- count = 0;
- while(it.hasNext()) {
- count++;
- it.next();
- }
- assertEquals(24, count);
-
- // The mini block count now runs to 191
- for(int i=0; i<=191; i++) {
- ministore.getBlockAt(i);
- }
- try {
- ministore.getBlockAt(192);
- fail("No block at 192");
- } catch(IndexOutOfBoundsException e) {}
-
-
- // Now try writing through to 192, check that the SBAT and blocks are there
- byte[] data = new byte[15*64];
- NPOIFSStream stream = new NPOIFSStream(ministore, 178);
- stream.updateContents(data);
-
- // Check now
- assertEquals(179 , ministore.getNextBlock(178));
- assertEquals(180 , ministore.getNextBlock(179));
- assertEquals(181 , ministore.getNextBlock(180));
- assertEquals(182 , ministore.getNextBlock(181));
- assertEquals(183 , ministore.getNextBlock(182));
- assertEquals(184 , ministore.getNextBlock(183));
- assertEquals(185 , ministore.getNextBlock(184));
- assertEquals(186 , ministore.getNextBlock(185));
- assertEquals(187 , ministore.getNextBlock(186));
- assertEquals(188 , ministore.getNextBlock(187));
- assertEquals(189 , ministore.getNextBlock(188));
- assertEquals(190 , ministore.getNextBlock(189));
- assertEquals(191 , ministore.getNextBlock(190));
- assertEquals(192 , ministore.getNextBlock(191));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(192));
- for(int i=193; i<256; i++) {
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(i));
- }
-
- fs.close();
- }
-
- @Test
- public void testCreateMiniStoreFirst() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem();
- NPOIFSMiniStore ministore = fs.getMiniStore();
- DocumentInputStream dis;
- DocumentEntry entry;
-
- // Initially has Properties + BAT but nothing else
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(2));
- // Ministore has no blocks, so can't iterate until used
- try {
- ministore.getNextBlock(0);
- } catch (IndexOutOfBoundsException e) {}
-
- // Write a very small new document, will populate the ministore for us
- byte[] data = new byte[8];
- for (int i=0; i<data.length; i++) {
- data[i] = (byte)(i+42);
- }
- fs.getRoot().createDocument("mini", new ByteArrayInputStream(data));
-
- // Should now have a mini-fat and a mini-stream
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,fs.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(3));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(4));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(0));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(1));
-
- // Re-fetch the mini store, and add to it a second time
- ministore = fs.getMiniStore();
- fs.getRoot().createDocument("mini2", new ByteArrayInputStream(data));
-
- // Main unchanged, ministore has a second
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,fs.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(3));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(4));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(0));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(2));
-
- // Check the data is unchanged and the right length
- entry = (DocumentEntry)fs.getRoot().getEntry("mini");
- assertEquals(data.length, entry.getSize());
- byte[] rdata = new byte[data.length];
- dis = new DocumentInputStream(entry);
- IOUtils.readFully(dis, rdata);
- assertArrayEquals(data, rdata);
- dis.close();
-
- entry = (DocumentEntry)fs.getRoot().getEntry("mini2");
- assertEquals(data.length, entry.getSize());
- rdata = new byte[data.length];
- dis = new DocumentInputStream(entry);
- IOUtils.readFully(dis, rdata);
- assertArrayEquals(data, rdata);
- dis.close();
-
- // Done
- fs.close();
- }
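What the block-level assertions above boil down to for a caller: a document smaller than 4096 bytes is stored in the mini stream, but it reads back through the normal document API just the same. A minimal sketch, reusing only APIs already exercised in this class:

    private static byte[] writeAndReadMini(byte[] small) throws Exception {
        try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
            // Small documents (under 4096 bytes) are placed in the mini stream
            fs.getRoot().createDocument("mini", new ByteArrayInputStream(small));

            // Reading back goes through the same API as a normal-sized stream
            byte[] back = new byte[small.length];
            try (DocumentInputStream dis = fs.createDocumentInputStream("mini")) {
                IOUtils.readFully(dis, back);
            }
            return back;
        }
    }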
-
- @Test
- public void testMultiBlockStream() throws Exception {
- byte[] data1B = new byte[63];
- byte[] data2B = new byte[64+14];
- for (int i=0; i<data1B.length; i++) {
- data1B[i] = (byte)(i+2);
- }
- for (int i=0; i<data2B.length; i++) {
- data2B[i] = (byte)(i+4);
- }
-
- // New filesystem and store to use
- NPOIFSFileSystem fs = new NPOIFSFileSystem();
- NPOIFSMiniStore ministore = fs.getMiniStore();
-
- // Initially has Properties + BAT but nothing else
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(2));
-
- // Store the 2 block one, should use 2 mini blocks, and request
- // the use of 2 big blocks
- ministore = fs.getMiniStore();
- fs.getRoot().createDocument("mini2", new ByteArrayInputStream(data2B));
-
- // Check
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2)); // SBAT
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(3)); // Mini
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(4));
-
- // First 2 Mini blocks will be used
- assertEquals(2, ministore.getFreeBlock());
-
- // Add one more mini-stream, and check
- fs.getRoot().createDocument("mini1", new ByteArrayInputStream(data1B));
-
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2)); // SBAT
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(3)); // Mini
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(4));
-
- // One more mini-block will be used
- assertEquals(3, ministore.getFreeBlock());
-
- // Check the contents too
- byte[] r1 = new byte[data1B.length];
- DocumentInputStream dis = fs.createDocumentInputStream("mini1");
- IOUtils.readFully(dis, r1);
- dis.close();
- assertArrayEquals(data1B, r1);
-
- byte[] r2 = new byte[data2B.length];
- dis = fs.createDocumentInputStream("mini2");
- IOUtils.readFully(dis, r2);
- dis.close();
- assertArrayEquals(data2B, r2);
- fs.close();
- }
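The arithmetic behind the free-block assertions above, as a sketch: mini blocks are 64 bytes, so the 78-byte (64+14) document needs two of them and the 63-byte one needs a third, which is why getFreeBlock() moves from 2 to 3. The helper below is illustrative only and assumes the standard 64-byte mini block size.

    private static int miniBlocksNeeded(int documentSize) {
        // Mini stream sectors are 64 bytes each; round the document size up
        final int MINI_BLOCK_SIZE = 64;
        return (documentSize + MINI_BLOCK_SIZE - 1) / MINI_BLOCK_SIZE;
    }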
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.filesystem;
-
-import static org.apache.poi.poifs.filesystem.TestNPOIFSFileSystem.writeOutAndReadBack;
-
-import java.io.ByteArrayInputStream;
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-
-import junit.framework.TestCase;
-
-import org.apache.poi.POIDataSamples;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.storage.BATBlock;
-
-/**
- * Tests {@link NPOIFSStream}
- */
-@SuppressWarnings("resource")
-public final class TestNPOIFSStream extends TestCase {
- private static final POIDataSamples _inst = POIDataSamples.getPOIFSInstance();
-
- /**
- * Read a single block stream
- */
- public void testReadTinyStream() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
-
- // 98 is actually the last block in a two block stream...
- NPOIFSStream stream = new NPOIFSStream(fs, 98);
- Iterator<ByteBuffer> i = stream.getBlockIterator();
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- ByteBuffer b = i.next();
- assertEquals(false, i.hasNext());
- assertEquals(false, i.hasNext());
- assertEquals(false, i.hasNext());
-
- // Check the contents
- assertEquals((byte)0x81, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x82, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
- assertEquals((byte)0x00, b.get());
-
- fs.close();
- }
-
- /**
- * Read a stream with only two blocks in it
- */
- public void testReadShortStream() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
-
- // 97 -> 98 -> end
- NPOIFSStream stream = new NPOIFSStream(fs, 97);
- Iterator<ByteBuffer> i = stream.getBlockIterator();
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- ByteBuffer b97 = i.next();
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- ByteBuffer b98 = i.next();
- assertEquals(false, i.hasNext());
- assertEquals(false, i.hasNext());
- assertEquals(false, i.hasNext());
-
- // Check the contents of the 1st block
- assertEquals((byte)0x01, b97.get());
- assertEquals((byte)0x00, b97.get());
- assertEquals((byte)0x00, b97.get());
- assertEquals((byte)0x00, b97.get());
- assertEquals((byte)0x02, b97.get());
- assertEquals((byte)0x00, b97.get());
- assertEquals((byte)0x00, b97.get());
- assertEquals((byte)0x00, b97.get());
-
- // Check the contents of the 2nd block
- assertEquals((byte)0x81, b98.get());
- assertEquals((byte)0x00, b98.get());
- assertEquals((byte)0x00, b98.get());
- assertEquals((byte)0x00, b98.get());
- assertEquals((byte)0x82, b98.get());
- assertEquals((byte)0x00, b98.get());
- assertEquals((byte)0x00, b98.get());
- assertEquals((byte)0x00, b98.get());
-
- fs.close();
- }
-
- /**
- * Read a stream with many blocks
- */
- public void testReadLongerStream() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
-
- ByteBuffer b0 = null;
- ByteBuffer b1 = null;
- ByteBuffer b22 = null;
-
- // The stream at 0 has 23 blocks in it
- NPOIFSStream stream = new NPOIFSStream(fs, 0);
- Iterator<ByteBuffer> i = stream.getBlockIterator();
- int count = 0;
- while(i.hasNext()) {
- ByteBuffer b = i.next();
- if(count == 0) {
- b0 = b;
- }
- if(count == 1) {
- b1 = b;
- }
- if(count == 22) {
- b22 = b;
- }
-
- count++;
- }
- assertEquals(23, count);
-
- // Check the contents
- // 1st block is at 0
- assertEquals((byte)0x9e, b0.get());
- assertEquals((byte)0x75, b0.get());
- assertEquals((byte)0x97, b0.get());
- assertEquals((byte)0xf6, b0.get());
-
- // 2nd block is at 1
- assertEquals((byte)0x86, b1.get());
- assertEquals((byte)0x09, b1.get());
- assertEquals((byte)0x22, b1.get());
- assertEquals((byte)0xfb, b1.get());
-
- // last block is at 89
- assertEquals((byte)0xfe, b22.get());
- assertEquals((byte)0xff, b22.get());
- assertEquals((byte)0x00, b22.get());
- assertEquals((byte)0x00, b22.get());
- assertEquals((byte)0x05, b22.get());
- assertEquals((byte)0x01, b22.get());
- assertEquals((byte)0x02, b22.get());
- assertEquals((byte)0x00, b22.get());
-
- fs.close();
- }
-
- /**
- * Read a stream with several blocks in a 4096 byte block file
- */
- public void testReadStream4096() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
-
- // 0 -> 1 -> 2 -> end
- NPOIFSStream stream = new NPOIFSStream(fs, 0);
- Iterator<ByteBuffer> i = stream.getBlockIterator();
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- ByteBuffer b0 = i.next();
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- ByteBuffer b1 = i.next();
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- ByteBuffer b2 = i.next();
- assertEquals(false, i.hasNext());
- assertEquals(false, i.hasNext());
- assertEquals(false, i.hasNext());
-
- // Check the contents of the 1st block
- assertEquals((byte)0x9E, b0.get());
- assertEquals((byte)0x75, b0.get());
- assertEquals((byte)0x97, b0.get());
- assertEquals((byte)0xF6, b0.get());
- assertEquals((byte)0xFF, b0.get());
- assertEquals((byte)0x21, b0.get());
- assertEquals((byte)0xD2, b0.get());
- assertEquals((byte)0x11, b0.get());
-
- // Check the contents of the 2nd block
- assertEquals((byte)0x00, b1.get());
- assertEquals((byte)0x00, b1.get());
- assertEquals((byte)0x03, b1.get());
- assertEquals((byte)0x00, b1.get());
- assertEquals((byte)0x00, b1.get());
- assertEquals((byte)0x00, b1.get());
- assertEquals((byte)0x00, b1.get());
- assertEquals((byte)0x00, b1.get());
-
- // Check the contents of the 3rd block
- assertEquals((byte)0x6D, b2.get());
- assertEquals((byte)0x00, b2.get());
- assertEquals((byte)0x00, b2.get());
- assertEquals((byte)0x00, b2.get());
- assertEquals((byte)0x03, b2.get());
- assertEquals((byte)0x00, b2.get());
- assertEquals((byte)0x46, b2.get());
- assertEquals((byte)0x00, b2.get());
-
- fs.close();
- }
-
- /**
- * Craft a nasty file with a loop, and ensure we don't get stuck
- */
- public void testReadFailsOnLoop() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
-
- // Hack the FAT so that it goes 0->1->2->0
- fs.setNextBlock(0, 1);
- fs.setNextBlock(1, 2);
- fs.setNextBlock(2, 0);
-
- // Now try to read
- NPOIFSStream stream = new NPOIFSStream(fs, 0);
- Iterator<ByteBuffer> i = stream.getBlockIterator();
- assertEquals(true, i.hasNext());
-
- // 1st read works
- i.next();
- assertEquals(true, i.hasNext());
-
- // 2nd read works
- i.next();
- assertEquals(true, i.hasNext());
-
- // 3rd read works
- i.next();
- assertEquals(true, i.hasNext());
-
- // 4th read blows up as it loops back to 0
- try {
- i.next();
- fail("Loop should have been detected but wasn't!");
- } catch(RuntimeException e) {
- // Good, it was detected
- }
- assertEquals(true, i.hasNext());
-
- fs.close();
- }
-
- /**
- * Tests that we can load some streams that are
- * stored in the mini stream.
- */
- public void testReadMiniStreams() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- NPOIFSMiniStore ministore = fs.getMiniStore();
-
- // 178 -> 179 -> 180 -> end
- NPOIFSStream stream = new NPOIFSStream(ministore, 178);
- Iterator<ByteBuffer> i = stream.getBlockIterator();
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- ByteBuffer b178 = i.next();
- assertEquals(true, i.hasNext());
- assertEquals(true, i.hasNext());
- ByteBuffer b179 = i.next();
- assertEquals(true, i.hasNext());
- ByteBuffer b180 = i.next();
- assertEquals(false, i.hasNext());
- assertEquals(false, i.hasNext());
- assertEquals(false, i.hasNext());
-
- // Check the contents of the 1st block
- assertEquals((byte)0xfe, b178.get());
- assertEquals((byte)0xff, b178.get());
- assertEquals((byte)0x00, b178.get());
- assertEquals((byte)0x00, b178.get());
- assertEquals((byte)0x05, b178.get());
- assertEquals((byte)0x01, b178.get());
- assertEquals((byte)0x02, b178.get());
- assertEquals((byte)0x00, b178.get());
-
- // And the 2nd
- assertEquals((byte)0x6c, b179.get());
- assertEquals((byte)0x00, b179.get());
- assertEquals((byte)0x00, b179.get());
- assertEquals((byte)0x00, b179.get());
- assertEquals((byte)0x28, b179.get());
- assertEquals((byte)0x00, b179.get());
- assertEquals((byte)0x00, b179.get());
- assertEquals((byte)0x00, b179.get());
-
- // And the 3rd
- assertEquals((byte)0x30, b180.get());
- assertEquals((byte)0x00, b180.get());
- assertEquals((byte)0x00, b180.get());
- assertEquals((byte)0x00, b180.get());
- assertEquals((byte)0x00, b180.get());
- assertEquals((byte)0x00, b180.get());
- assertEquals((byte)0x00, b180.get());
- assertEquals((byte)0x80, b180.get());
-
- fs.close();
- }
-
- /**
- * Writing the same amount of data as before
- */
- public void testReplaceStream() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
-
- byte[] data = new byte[512];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i%256);
- }
-
- // 98 is actually the last block in a two block stream...
- NPOIFSStream stream = new NPOIFSStream(fs, 98);
- stream.updateContents(data);
-
- // Check the reading of blocks
- Iterator<ByteBuffer> it = stream.getBlockIterator();
- assertEquals(true, it.hasNext());
- ByteBuffer b = it.next();
- assertEquals(false, it.hasNext());
-
- // Now check the contents
- data = new byte[512];
- b.get(data);
- for(int i=0; i<data.length; i++) {
- byte exp = (byte)(i%256);
- assertEquals(exp, data[i]);
- }
-
- fs.close();
- }
-
- /**
- * Writes less data than before, some blocks will need
- * to be freed
- */
- public void testReplaceStreamWithLess() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
-
- byte[] data = new byte[512];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i%256);
- }
-
- // 97 -> 98 -> end
- assertEquals(98, fs.getNextBlock(97));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(98));
-
- // Create a 2 block stream, will become a 1 block one
- NPOIFSStream stream = new NPOIFSStream(fs, 97);
- stream.updateContents(data);
-
- // 97 should now be the end, and 98 free
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(97));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(98));
-
- // Check the reading of blocks
- Iterator<ByteBuffer> it = stream.getBlockIterator();
- assertEquals(true, it.hasNext());
- ByteBuffer b = it.next();
- assertEquals(false, it.hasNext());
-
- // Now check the contents
- data = new byte[512];
- b.get(data);
- for(int i=0; i<data.length; i++) {
- byte exp = (byte)(i%256);
- assertEquals(exp, data[i]);
- }
-
- fs.close();
- }
-
- /**
- * Writes more data than before, new blocks will be needed
- */
- public void testReplaceStreamWithMore() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
-
- byte[] data = new byte[512*3];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i%256);
- }
-
- // 97 -> 98 -> end
- assertEquals(98, fs.getNextBlock(97));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(98));
-
- // 100 is our first free one
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
-
- // Create a 2 block stream, will become a 3 block one
- NPOIFSStream stream = new NPOIFSStream(fs, 97);
- stream.updateContents(data);
-
- // 97 -> 98 -> 100 -> end
- assertEquals(98, fs.getNextBlock(97));
- assertEquals(100, fs.getNextBlock(98));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(100));
-
- // Check the reading of blocks
- Iterator<ByteBuffer> it = stream.getBlockIterator();
- int count = 0;
- while(it.hasNext()) {
- ByteBuffer b = it.next();
- data = new byte[512];
- b.get(data);
- for(int i=0; i<data.length; i++) {
- byte exp = (byte)(i%256);
- assertEquals(exp, data[i]);
- }
- count++;
- }
- assertEquals(3, count);
-
- fs.close();
- }
-
- /**
- * Writes to a new stream in the file
- */
- public void testWriteNewStream() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
-
- // 100 is our first free one
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(101));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(102));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(103));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(104));
-
-
- // Add a single block one
- byte[] data = new byte[512];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i%256);
- }
-
- NPOIFSStream stream = new NPOIFSStream(fs);
- stream.updateContents(data);
-
- // Check it was allocated properly
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(100));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(101));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(102));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(103));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(104));
-
- // And check the contents
- Iterator<ByteBuffer> it = stream.getBlockIterator();
- int count = 0;
- while(it.hasNext()) {
- ByteBuffer b = it.next();
- data = new byte[512];
- b.get(data);
- for(int i=0; i<data.length; i++) {
- byte exp = (byte)(i%256);
- assertEquals(exp, data[i]);
- }
- count++;
- }
- assertEquals(1, count);
-
-
- // And a multi block one
- data = new byte[512*3];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i%256);
- }
-
- stream = new NPOIFSStream(fs);
- stream.updateContents(data);
-
- // Check it was allocated properly
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(100));
- assertEquals(102, fs.getNextBlock(101));
- assertEquals(103, fs.getNextBlock(102));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(103));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(104));
-
- // And check the contents
- it = stream.getBlockIterator();
- count = 0;
- while(it.hasNext()) {
- ByteBuffer b = it.next();
- data = new byte[512];
- b.get(data);
- for(int i=0; i<data.length; i++) {
- byte exp = (byte)(i%256);
- assertEquals(exp, data[i]);
- }
- count++;
- }
- assertEquals(3, count);
-
- // Free it
- stream.free();
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(100));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(101));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(102));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(103));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(104));
-
- fs.close();
- }
-
- /**
- * Writes to a new stream in the file, where we've not enough
- * free blocks so new FAT segments will need to be allocated
- * to support this
- */
- public void testWriteNewStreamExtraFATs() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
-
- // Allocate almost all the blocks
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(127));
- for(int i=100; i<127; i++) {
- fs.setNextBlock(i, POIFSConstants.END_OF_CHAIN);
- }
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(127));
- assertEquals(true, fs.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
-
-
- // Write a 3 block stream
- byte[] data = new byte[512*3];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i%256);
- }
- NPOIFSStream stream = new NPOIFSStream(fs);
- stream.updateContents(data);
-
- // Check we got another BAT
- assertEquals(false, fs.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
- assertEquals(true, fs.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
-
- // the BAT will be in the first spot of the new block
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(126));
- assertEquals(129, fs.getNextBlock(127));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(128));
- assertEquals(130, fs.getNextBlock(129));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(130));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(131));
-
- fs.close();
- }
-
- /**
- * Replaces data in an existing stream, with a bit
- * more data than before, in a 4096 byte block file
- */
- public void testWriteStream4096() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
-
- // 0 -> 1 -> 2 -> end
- assertEquals(1, fs.getNextBlock(0));
- assertEquals(2, fs.getNextBlock(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
- assertEquals(4, fs.getNextBlock(3));
-
- // First free one is at 15
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(14));
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(15));
-
-
- // Write a 5 block file
- byte[] data = new byte[4096*5];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i%256);
- }
- NPOIFSStream stream = new NPOIFSStream(fs, 0);
- stream.updateContents(data);
-
-
- // Check it
- assertEquals(1, fs.getNextBlock(0));
- assertEquals(2, fs.getNextBlock(1));
- assertEquals(15, fs.getNextBlock(2)); // Jumps
- assertEquals(4, fs.getNextBlock(3)); // Next stream
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(14));
- assertEquals(16, fs.getNextBlock(15)); // Continues
- assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(16)); // Ends
- assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(17)); // Free
-
- // Check the contents too
- Iterator<ByteBuffer> it = stream.getBlockIterator();
- int count = 0;
- while(it.hasNext()) {
- ByteBuffer b = it.next();
- data = new byte[512];
- b.get(data);
- for(int i=0; i<data.length; i++) {
- byte exp = (byte)(i%256);
- assertEquals(exp, data[i]);
- }
- count++;
- }
- assertEquals(5, count);
-
- fs.close();
- }
-
- /**
- * Tests that we can write into the mini stream
- */
- public void testWriteMiniStreams() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
- NPOIFSMiniStore ministore = fs.getMiniStore();
- NPOIFSStream stream = new NPOIFSStream(ministore, 178);
-
- // 178 -> 179 -> 180 -> end
- assertEquals(179, ministore.getNextBlock(178));
- assertEquals(180, ministore.getNextBlock(179));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
-
-
- // Try writing 3 full blocks worth
- byte[] data = new byte[64*3];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)i;
- }
- stream = new NPOIFSStream(ministore, 178);
- stream.updateContents(data);
-
- // Check
- assertEquals(179, ministore.getNextBlock(178));
- assertEquals(180, ministore.getNextBlock(179));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
-
- stream = new NPOIFSStream(ministore, 178);
- Iterator<ByteBuffer> it = stream.getBlockIterator();
- ByteBuffer b178 = it.next();
- ByteBuffer b179 = it.next();
- ByteBuffer b180 = it.next();
- assertEquals(false, it.hasNext());
-
- assertEquals((byte)0x00, b178.get());
- assertEquals((byte)0x01, b178.get());
- assertEquals((byte)0x40, b179.get());
- assertEquals((byte)0x41, b179.get());
- assertEquals((byte)0x80, b180.get());
- assertEquals((byte)0x81, b180.get());
-
-
- // Try writing just into 3 blocks worth
- data = new byte[64*2 + 12];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i+4);
- }
- stream = new NPOIFSStream(ministore, 178);
- stream.updateContents(data);
-
- // Check
- assertEquals(179, ministore.getNextBlock(178));
- assertEquals(180, ministore.getNextBlock(179));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
-
- stream = new NPOIFSStream(ministore, 178);
- it = stream.getBlockIterator();
- b178 = it.next();
- b179 = it.next();
- b180 = it.next();
- assertEquals(false, it.hasNext());
-
- assertEquals((byte)0x04, b178.get(0));
- assertEquals((byte)0x05, b178.get(1));
- assertEquals((byte)0x44, b179.get(0));
- assertEquals((byte)0x45, b179.get(1));
- assertEquals((byte)0x84, b180.get(0));
- assertEquals((byte)0x85, b180.get(1));
-
-
- // Try writing 1, should truncate
- data = new byte[12];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i+9);
- }
- stream = new NPOIFSStream(ministore, 178);
- stream.updateContents(data);
-
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(178));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(179));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(180));
-
- stream = new NPOIFSStream(ministore, 178);
- it = stream.getBlockIterator();
- b178 = it.next();
- assertEquals(false, it.hasNext());
-
- assertEquals((byte)0x09, b178.get(0));
- assertEquals((byte)0x0a, b178.get(1));
-
-
- // Try writing 5, should extend
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(178));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(179));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(180));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(181));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(182));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(183));
-
- data = new byte[64*4 + 12];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i+3);
- }
- stream = new NPOIFSStream(ministore, 178);
- stream.updateContents(data);
-
- assertEquals(179, ministore.getNextBlock(178));
- assertEquals(180, ministore.getNextBlock(179));
- assertEquals(181, ministore.getNextBlock(180));
- assertEquals(182, ministore.getNextBlock(181));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(182));
-
- stream = new NPOIFSStream(ministore, 178);
- it = stream.getBlockIterator();
- b178 = it.next();
- b179 = it.next();
- b180 = it.next();
- ByteBuffer b181 = it.next();
- ByteBuffer b182 = it.next();
- assertEquals(false, it.hasNext());
-
- assertEquals((byte)0x03, b178.get(0));
- assertEquals((byte)0x04, b178.get(1));
- assertEquals((byte)0x43, b179.get(0));
- assertEquals((byte)0x44, b179.get(1));
- assertEquals((byte)0x83, b180.get(0));
- assertEquals((byte)0x84, b180.get(1));
- assertEquals((byte)0xc3, b181.get(0));
- assertEquals((byte)0xc4, b181.get(1));
- assertEquals((byte)0x03, b182.get(0));
- assertEquals((byte)0x04, b182.get(1));
-
-
- // Write lots, so it needs another big block
- ministore.getBlockAt(183);
- try {
- ministore.getBlockAt(184);
- fail("Block 184 should be off the end of the list");
- } catch(IndexOutOfBoundsException e) {}
-
- data = new byte[64*6 + 12];
- for(int i=0; i<data.length; i++) {
- data[i] = (byte)(i+1);
- }
- stream = new NPOIFSStream(ministore, 178);
- stream.updateContents(data);
-
- // Should have added 2 more blocks to the chain
- assertEquals(179, ministore.getNextBlock(178));
- assertEquals(180, ministore.getNextBlock(179));
- assertEquals(181, ministore.getNextBlock(180));
- assertEquals(182, ministore.getNextBlock(181));
- assertEquals(183, ministore.getNextBlock(182));
- assertEquals(184, ministore.getNextBlock(183));
- assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(184));
- assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(185));
-
- // Block 184 should exist
- ministore.getBlockAt(183);
- ministore.getBlockAt(184);
- ministore.getBlockAt(185);
-
- // Check contents
- stream = new NPOIFSStream(ministore, 178);
- it = stream.getBlockIterator();
- b178 = it.next();
- b179 = it.next();
- b180 = it.next();
- b181 = it.next();
- b182 = it.next();
- ByteBuffer b183 = it.next();
- ByteBuffer b184 = it.next();
- assertEquals(false, it.hasNext());
-
- assertEquals((byte)0x01, b178.get(0));
- assertEquals((byte)0x02, b178.get(1));
- assertEquals((byte)0x41, b179.get(0));
- assertEquals((byte)0x42, b179.get(1));
- assertEquals((byte)0x81, b180.get(0));
- assertEquals((byte)0x82, b180.get(1));
- assertEquals((byte)0xc1, b181.get(0));
- assertEquals((byte)0xc2, b181.get(1));
- assertEquals((byte)0x01, b182.get(0));
- assertEquals((byte)0x02, b182.get(1));
- assertEquals((byte)0x41, b183.get(0));
- assertEquals((byte)0x42, b183.get(1));
- assertEquals((byte)0x81, b184.get(0));
- assertEquals((byte)0x82, b184.get(1));
-
- fs.close();
- }
-
- /**
- * Craft a nasty file with a loop, and ensure we don't get stuck
- */
- public void testWriteFailsOnLoop() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
-
- // Hack the FAT so that it goes 0->1->2->0
- fs.setNextBlock(0, 1);
- fs.setNextBlock(1, 2);
- fs.setNextBlock(2, 0);
-
- // Try to write a large amount, should fail on the write
- byte[] data = new byte[512*4];
- NPOIFSStream stream = new NPOIFSStream(fs, 0);
- try {
- stream.updateContents(data);
- fail("Loop should have been detected but wasn't!");
- } catch(IllegalStateException e) {}
-
- // Now reset, and try on a small bit
- // Should fail during the freeing set
- fs.setNextBlock(0, 1);
- fs.setNextBlock(1, 2);
- fs.setNextBlock(2, 0);
-
- data = new byte[512];
- stream = new NPOIFSStream(fs, 0);
- try {
- stream.updateContents(data);
- fail("Loop should have been detected but wasn't!");
- } catch(IllegalStateException e) {}
-
- fs.close();
- }
-
- /**
- * Tests adding a new stream, writing and reading it.
- */
- public void testReadWriteNewStream() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem();
- NPOIFSStream stream = new NPOIFSStream(fs);
-
- // Check our filesystem has Properties then BAT
- assertEquals(2, fs.getFreeBlock());
- BATBlock bat = fs.getBATBlockAndIndex(0).getBlock();
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(2));
-
- // Check the stream as-is
- assertEquals(POIFSConstants.END_OF_CHAIN, stream.getStartBlock());
- try {
- stream.getBlockIterator();
- fail("Shouldn't be able to get an iterator before writing");
- } catch(IllegalStateException e) {}
-
- // Write in two blocks
- byte[] data = new byte[512+20];
- for(int i=0; i<512; i++) {
- data[i] = (byte)(i%256);
- }
- for(int i=512; i<data.length; i++) {
- data[i] = (byte)(i%256 + 100);
- }
- stream.updateContents(data);
-
- // Check now
- assertEquals(4, fs.getFreeBlock());
- bat = fs.getBATBlockAndIndex(0).getBlock();
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
- assertEquals(3, bat.getValueAt(2));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(3));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(4));
-
-
- Iterator<ByteBuffer> it = stream.getBlockIterator();
- assertEquals(true, it.hasNext());
- ByteBuffer b = it.next();
-
- byte[] read = new byte[512];
- b.get(read);
- for(int i=0; i<read.length; i++) {
- assertEquals("Wrong value at " + i, data[i], read[i]);
- }
-
- assertEquals(true, it.hasNext());
- b = it.next();
-
- read = new byte[512];
- b.get(read);
- for(int i=0; i<20; i++) {
- assertEquals(data[i+512], read[i]);
- }
- for(int i=20; i<read.length; i++) {
- assertEquals(0, read[i]);
- }
-
- assertEquals(false, it.hasNext());
-
- fs.close();
- }
-
- /**
- * Writes a stream, then replaces it
- */
- public void testWriteThenReplace() throws Exception {
- NPOIFSFileSystem fs = new NPOIFSFileSystem();
-
- // Starts empty, other that Properties and BAT
- BATBlock bat = fs.getBATBlockAndIndex(0).getBlock();
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(2));
-
- // Write something that uses a main stream
- byte[] main4106 = new byte[4106];
- main4106[0] = -10;
- main4106[4105] = -11;
- DocumentEntry normal = fs.getRoot().createDocument(
- "Normal", new ByteArrayInputStream(main4106));
-
- // Should have used 9 blocks
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
- assertEquals(3, bat.getValueAt(2));
- assertEquals(4, bat.getValueAt(3));
- assertEquals(5, bat.getValueAt(4));
- assertEquals(6, bat.getValueAt(5));
- assertEquals(7, bat.getValueAt(6));
- assertEquals(8, bat.getValueAt(7));
- assertEquals(9, bat.getValueAt(8));
- assertEquals(10, bat.getValueAt(9));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(10));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
-
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- assertEquals(4106, normal.getSize());
- assertEquals(4106, ((DocumentNode)normal).getProperty().getSize());
-
-
- // Replace with one still big enough for a main stream, but one block smaller
- byte[] main4096 = new byte[4096];
- main4096[0] = -10;
- main4096[4095] = -11;
-
- NDocumentOutputStream nout = new NDocumentOutputStream(normal);
- nout.write(main4096);
- nout.close();
-
- // Will have dropped to 8
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
- assertEquals(3, bat.getValueAt(2));
- assertEquals(4, bat.getValueAt(3));
- assertEquals(5, bat.getValueAt(4));
- assertEquals(6, bat.getValueAt(5));
- assertEquals(7, bat.getValueAt(6));
- assertEquals(8, bat.getValueAt(7));
- assertEquals(9, bat.getValueAt(8));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(9));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(10));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
-
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- assertEquals(4096, normal.getSize());
- assertEquals(4096, ((DocumentNode)normal).getProperty().getSize());
-
-
- // Write and check
- fs = writeOutAndReadBack(fs);
- bat = fs.getBATBlockAndIndex(0).getBlock();
-
- // No change after write
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0)); // Properties
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
- assertEquals(3, bat.getValueAt(2));
- assertEquals(4, bat.getValueAt(3));
- assertEquals(5, bat.getValueAt(4));
- assertEquals(6, bat.getValueAt(5));
- assertEquals(7, bat.getValueAt(6));
- assertEquals(8, bat.getValueAt(7));
- assertEquals(9, bat.getValueAt(8));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(9)); // End of Normal
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(10));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
-
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- assertEquals(4096, normal.getSize());
- assertEquals(4096, ((DocumentNode)normal).getProperty().getSize());
-
-
- // Make longer, take 1 block at the end
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- nout = new NDocumentOutputStream(normal);
- nout.write(main4106);
- nout.close();
-
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
- assertEquals(3, bat.getValueAt(2));
- assertEquals(4, bat.getValueAt(3));
- assertEquals(5, bat.getValueAt(4));
- assertEquals(6, bat.getValueAt(5));
- assertEquals(7, bat.getValueAt(6));
- assertEquals(8, bat.getValueAt(7));
- assertEquals(9, bat.getValueAt(8));
- assertEquals(10, bat.getValueAt(9));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(10)); // Normal
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(12));
-
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- assertEquals(4106, normal.getSize());
- assertEquals(4106, ((DocumentNode)normal).getProperty().getSize());
-
-
- // Make it small, will trigger the SBAT stream and free lots up
- byte[] mini = new byte[] { 42, 0, 1, 2, 3, 4, 42 };
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- nout = new NDocumentOutputStream(normal);
- nout.write(mini);
- nout.close();
-
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, bat.getValueAt(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(2)); // SBAT
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(3)); // Mini Stream
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(4));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(5));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(6));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(7));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(8));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(9));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(10));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(12));
-
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- assertEquals(7, normal.getSize());
- assertEquals(7, ((DocumentNode)normal).getProperty().getSize());
-
-
- // Finally back to big again
- nout = new NDocumentOutputStream(normal);
- nout.write(main4096);
- nout.close();
-
- // Will keep the mini stream, now empty
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, bat.getValueAt(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(2)); // SBAT
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(3)); // Mini Stream
- assertEquals(5, bat.getValueAt(4));
- assertEquals(6, bat.getValueAt(5));
- assertEquals(7, bat.getValueAt(6));
- assertEquals(8, bat.getValueAt(7));
- assertEquals(9, bat.getValueAt(8));
- assertEquals(10, bat.getValueAt(9));
- assertEquals(11, bat.getValueAt(10));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(11));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(12));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(13));
-
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- assertEquals(4096, normal.getSize());
- assertEquals(4096, ((DocumentNode)normal).getProperty().getSize());
-
-
- // Save, re-load, re-check
- fs = writeOutAndReadBack(fs);
- bat = fs.getBATBlockAndIndex(0).getBlock();
-
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
- assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, bat.getValueAt(1));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(2)); // SBAT
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(3)); // Mini Stream
- assertEquals(5, bat.getValueAt(4));
- assertEquals(6, bat.getValueAt(5));
- assertEquals(7, bat.getValueAt(6));
- assertEquals(8, bat.getValueAt(7));
- assertEquals(9, bat.getValueAt(8));
- assertEquals(10, bat.getValueAt(9));
- assertEquals(11, bat.getValueAt(10));
- assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(11));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(12));
- assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(13));
-
- normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
- assertEquals(4096, normal.getSize());
- assertEquals(4096, ((DocumentNode)normal).getProperty().getSize());
-
- fs.close();
- }
-}
package org.apache.poi.poifs.filesystem;
+import static org.apache.poi.POITestCase.assertContains;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import static org.apache.poi.POITestCase.assertContains;
import java.io.ByteArrayOutputStream;
import java.io.File;
import org.apache.poi.POIDataSamples;
import org.apache.poi.util.IOUtils;
-import org.junit.Ignore;
import org.junit.Test;
public class TestOle10Native {
};
for (File f : files) {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(f, true);
+ POIFSFileSystem fs = new POIFSFileSystem(f, true);
List<Entry> entries = new ArrayList<>();
- findOle10(entries, fs.getRoot(), "/", "");
+ findOle10(entries, fs.getRoot(), "/");
for (Entry e : entries) {
ByteArrayOutputStream bosExp = new ByteArrayOutputStream();
}
}
- /*
- void searchOle10Files() throws Exception {
- File dir = new File("test-data/document");
- for (File file : dir.listFiles(new FileFilter(){
- public boolean accept(File pathname) {
- return pathname.getName().endsWith("doc");
- }
- })) {
- NPOIFSFileSystem fs = new NPOIFSFileSystem(file, true);
- findOle10(null, fs.getRoot(), "/", file.getName());
- fs.close();
- }
- }*/
-
- void findOle10(List<Entry> entries, DirectoryNode dn, String path, String filename) {
+ private void findOle10(List<Entry> entries, DirectoryNode dn, String path) {
Iterator<Entry> iter = dn.getEntries();
while (iter.hasNext()) {
Entry e = iter.next();
if (entries != null) entries.add(e);
// System.out.println(filename+" : "+path);
} else if (e.isDirectoryEntry()) {
- findOle10(entries, (DirectoryNode)e, path+e.getName()+"/", filename);
+ findOle10(entries, (DirectoryNode)e, path+e.getName()+"/");
}
}
}
@Test
- public void testOleNativeOOM() throws IOException, Ole10NativeException {
+ public void testOleNativeOOM() throws IOException {
POIFSFileSystem fs = new POIFSFileSystem(dataSamples.openResourceAsStream("60256.bin"));
try {
Ole10Native.createFromEmbeddedOleObject(fs);
import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.storage.BATBlock;
-import org.apache.poi.poifs.storage.BlockAllocationTableReader;
import org.apache.poi.poifs.storage.HeaderBlock;
-import org.apache.poi.poifs.storage.RawDataBlockList;
import org.apache.poi.util.IOUtils;
import org.junit.Rule;
import org.junit.Test;
// Normal case - read until EOF and close
testIS = new TestIS(openSampleStream("13224.xls"), -1);
- try (NPOIFSFileSystem ignored = new NPOIFSFileSystem(testIS)){
+ try (POIFSFileSystem ignored = new POIFSFileSystem(testIS)){
assertTrue("input stream was not closed", testIS.isClosed());
}
// intended to crash after reading 10000 bytes
testIS = new TestIS(openSampleStream("13224.xls"), 10000);
- try (NPOIFSFileSystem ignored = new NPOIFSFileSystem(testIS)){
+ try (POIFSFileSystem ignored = new POIFSFileSystem(testIS)){
fail("ex should have been thrown");
} catch (MyEx e) {
// expected
for (String file : files) {
// Open the file up
- NPOIFSFileSystem fs = new NPOIFSFileSystem(
+ POIFSFileSystem fs = new POIFSFileSystem(
_samples.openResourceAsStream(file)
);
expectedEx.expect(IndexOutOfBoundsException.class);
expectedEx.expectMessage("Block 1148 not found");
try (InputStream stream = _samples.openResourceAsStream("ReferencesInvalidSectors.mpp")) {
- new NPOIFSFileSystem(stream);
+ new POIFSFileSystem(stream);
fail("File is corrupt and shouldn't have been opened");
}
}
@Test
public void testBATandXBAT() throws Exception {
byte[] hugeStream = new byte[8*1024*1024];
- NPOIFSFileSystem fs = new NPOIFSFileSystem();
+ POIFSFileSystem fs = new POIFSFileSystem();
fs.getRoot().createDocument(
"BIG", new ByteArrayInputStream(hugeStream)
);
assertEquals(POIFSConstants.END_OF_CHAIN, xbat.getValueAt(127));
- // Load the blocks and check with that
- RawDataBlockList blockList = new RawDataBlockList(inp, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- assertEquals(fsData.length / 512, blockList.blockCount() + 1); // Header not counted
- new BlockAllocationTableReader(header.getBigBlockSize(),
- header.getBATCount(),
- header.getBATArray(),
- header.getXBATCount(),
- header.getXBATIndex(),
- blockList);
- assertEquals(fsData.length / 512, blockList.blockCount() + 1); // Header not counted
-
// Now load it and check
- fs = new NPOIFSFileSystem(
+ fs = new POIFSFileSystem(
new ByteArrayInputStream(fsData)
);
assertEquals(1, header_block.getBATCount());
assertEquals(0, header_block.getXBATCount());
- // Now check we can get the basic fat
- RawDataBlockList data_blocks = new RawDataBlockList(inp,
- bigBlockSize);
- assertEquals(15, data_blocks.blockCount());
-
// Now try and open properly
- NPOIFSFileSystem fs = new NPOIFSFileSystem(
+ POIFSFileSystem fs = new POIFSFileSystem(
_samples.openResourceAsStream("BlockSize4096.zvi"));
assertTrue(fs.getRoot().getEntryCount() > 3);
checkAllDirectoryContents(fs.getRoot());
// Finally, check we can do a similar 512byte one too
- fs = new NPOIFSFileSystem(
+ fs = new POIFSFileSystem(
_samples.openResourceAsStream("BlockSize512.zvi"));
assertTrue(fs.getRoot().getEntryCount() > 3);
checkAllDirectoryContents(fs.getRoot());
--- /dev/null
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+package org.apache.poi.poifs.filesystem;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+
+import org.apache.poi.POIDataSamples;
+import org.apache.poi.poifs.common.POIFSConstants;
+import org.apache.poi.util.IOUtils;
+import org.junit.Test;
+
+/**
+ * Tests for the Mini Store in the NIO POIFS
+ */
+@SuppressWarnings("CatchMayIgnoreException")
+public final class TestPOIFSMiniStore {
+ private static final POIDataSamples _inst = POIDataSamples.getPOIFSInstance();
+
+ /**
+ * Check that for a given mini block, we can correctly figure
+ * out what the next one is
+ */
+ @Test
+ public void testNextBlock() throws Exception {
+ // It's the same on 512 byte and 4096 byte block files!
+ POIFSFileSystem fsA = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+ POIFSFileSystem fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ POIFSFileSystem fsC = new POIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
+ POIFSFileSystem fsD = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB,fsC,fsD}) {
+ POIFSMiniStore ministore = fs.getMiniStore();
+
+ // 0 -> 51 is one stream
+ for(int i=0; i<50; i++) {
+ assertEquals(i+1, ministore.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(50));
+
+ // 51 -> 103 is the next
+ for(int i=51; i<103; i++) {
+ assertEquals(i+1, ministore.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(103));
+
+ // Then there are 3 one block ones
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(104));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(105));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(106));
+
+ // 107 -> 154 is the next
+ for(int i=107; i<154; i++) {
+ assertEquals(i+1, ministore.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(154));
+
+ // 155 -> 160 is the next
+ for(int i=155; i<160; i++) {
+ assertEquals(i+1, ministore.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(160));
+
+ // 161 -> 166 is the next
+ for(int i=161; i<166; i++) {
+ assertEquals(i+1, ministore.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(166));
+
+ // 167 -> 172 is the next
+ for(int i=167; i<172; i++) {
+ assertEquals(i+1, ministore.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(172));
+
+ // Now some short ones
+ assertEquals(174 , ministore.getNextBlock(173));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(174));
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(175));
+
+ assertEquals(177 , ministore.getNextBlock(176));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(177));
+
+ assertEquals(179 , ministore.getNextBlock(178));
+ assertEquals(180 , ministore.getNextBlock(179));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
+
+ // 181 onwards is free
+ for(int i=181; i<fs.getBigBlockSizeDetails().getBATEntriesPerBlock(); i++) {
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(i));
+ }
+ }
+ fsD.close();
+ fsC.close();
+ fsB.close();
+ fsA.close();
+ }
+
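+    /**
+     * Illustrative helper, not part of the original test: one way to walk a
+     * mini FAT chain from a start block using the same getNextBlock() calls
+     * exercised above. Assumes the chain is well formed and terminated by
+     * END_OF_CHAIN.
+     */
+    private static int countMiniChainBlocks(POIFSMiniStore ministore, int startBlock) {
+        int count = 0;
+        int block = startBlock;
+        while (block != POIFSConstants.END_OF_CHAIN) {
+            count++;
+            block = ministore.getNextBlock(block);
+        }
+        return count;
+    }
+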
+ /**
+ * Check we get the right data back for each block
+ */
+ @Test
+ public void testGetBlock() throws Exception {
+ // It's the same on 512 byte and 4096 byte block files!
+ POIFSFileSystem fsA = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+ POIFSFileSystem fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ POIFSFileSystem fsC = new POIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
+ POIFSFileSystem fsD = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB,fsC,fsD}) {
+ // Mini stream should be at big block zero
+ assertEquals(0, fs._get_property_table().getRoot().getStartBlock());
+
+ // Grab the ministore
+ POIFSMiniStore ministore = fs.getMiniStore();
+ ByteBuffer b;
+
+            // Runs from the start of the data section in 64 byte chunks
+ b = ministore.getBlockAt(0);
+ assertEquals((byte)0x9e, b.get());
+ assertEquals((byte)0x75, b.get());
+ assertEquals((byte)0x97, b.get());
+ assertEquals((byte)0xf6, b.get());
+ assertEquals((byte)0xff, b.get());
+ assertEquals((byte)0x21, b.get());
+ assertEquals((byte)0xd2, b.get());
+ assertEquals((byte)0x11, b.get());
+
+ // And the next block
+ b = ministore.getBlockAt(1);
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x03, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x12, b.get());
+ assertEquals((byte)0x02, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+
+ // Check the last data block
+ b = ministore.getBlockAt(180);
+ assertEquals((byte)0x30, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x80, b.get());
+
+ // And the rest until the end of the big block is zeros
+ for(int i=181; i<184; i++) {
+ b = ministore.getBlockAt(i);
+ assertEquals((byte)0, b.get());
+ assertEquals((byte)0, b.get());
+ assertEquals((byte)0, b.get());
+ assertEquals((byte)0, b.get());
+ assertEquals((byte)0, b.get());
+ assertEquals((byte)0, b.get());
+ assertEquals((byte)0, b.get());
+ assertEquals((byte)0, b.get());
+ }
+ }
+ fsD.close();
+ fsC.close();
+ fsB.close();
+ fsA.close();
+ }
+
+ /**
+ * Ask for free blocks where there are some already
+ * to be had from the SFAT
+ */
+ @Test
+ public void testGetFreeBlockWithSpare() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+ POIFSMiniStore ministore = fs.getMiniStore();
+
+ // Our 2nd SBAT block has spares
+ assertFalse(ministore.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+ assertTrue(ministore.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
+
+ // First free one at 181
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(181));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(182));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(183));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(184));
+
+ // Ask, will get 181
+ assertEquals(181, ministore.getFreeBlock());
+
+ // Ask again, will still get 181 as not written to
+ assertEquals(181, ministore.getFreeBlock());
+
+ // Allocate it, then ask again
+ ministore.setNextBlock(181, POIFSConstants.END_OF_CHAIN);
+ assertEquals(182, ministore.getFreeBlock());
+
+ fs.close();
+ }
+
+ /**
+ * Ask for free blocks where no free ones exist, and so the
+ * stream needs to be extended and another SBAT added
+ */
+ @Test
+ public void testGetFreeBlockWithNoneSpare() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ POIFSMiniStore ministore = fs.getMiniStore();
+
+ // We've spare ones from 181 to 255
+ for(int i=181; i<256; i++) {
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(i));
+ }
+
+ // Check our SBAT free stuff is correct
+ assertFalse(ministore.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+ assertTrue(ministore.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
+
+ // Allocate all the spare ones
+ for(int i=181; i<256; i++) {
+ ministore.setNextBlock(i, POIFSConstants.END_OF_CHAIN);
+ }
+
+        // The SBATs are now full, but there are only the two of them
+ assertFalse(ministore.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+ assertFalse(ministore.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
+ try {
+ assertFalse(ministore.getBATBlockAndIndex(256).getBlock().hasFreeSectors());
+ fail("Should only be two SBATs");
+ } catch(IndexOutOfBoundsException e) {}
+
+ // Now ask for a free one, will need to extend the SBAT chain
+ assertEquals(256, ministore.getFreeBlock());
+
+ assertFalse(ministore.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+ assertFalse(ministore.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
+ assertTrue(ministore.getBATBlockAndIndex(256).getBlock().hasFreeSectors());
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(254)); // 2nd SBAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(255)); // 2nd SBAT
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(256)); // 3rd SBAT
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(257)); // 3rd SBAT
+
+ fs.close();
+ }
+
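+    /**
+     * Illustrative helper, not part of the original test: the allocation
+     * pattern used above - claim a run of mini blocks by marking each one
+     * as END_OF_CHAIN so the SBAT reports them as used.
+     */
+    private static void claimMiniBlocks(POIFSMiniStore ministore, int from, int toExclusive) {
+        for (int i = from; i < toExclusive; i++) {
+            ministore.setNextBlock(i, POIFSConstants.END_OF_CHAIN);
+        }
+    }
+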
+ /**
+ * Test that we will extend the underlying chain of
+ * big blocks that make up the ministream as needed
+ */
+ @Test
+ public void testCreateBlockIfNeeded() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ POIFSMiniStore ministore = fs.getMiniStore();
+
+ // 178 -> 179 -> 180, 181+ is free
+ assertEquals(179 , ministore.getNextBlock(178));
+ assertEquals(180 , ministore.getNextBlock(179));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
+ for(int i=181; i<256; i++) {
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(i));
+ }
+
+ // However, the ministore data only covers blocks to 183
+ for(int i=0; i<=183; i++) {
+ ministore.getBlockAt(i);
+ }
+ try {
+ ministore.getBlockAt(184);
+ fail("No block at 184");
+ } catch(IndexOutOfBoundsException e) {}
+
+ // The ministore itself is made up of 23 big blocks
+ Iterator<ByteBuffer> it = new POIFSStream(fs, fs.getRoot().getProperty().getStartBlock()).getBlockIterator();
+ int count = 0;
+ while(it.hasNext()) {
+ count++;
+ it.next();
+ }
+ assertEquals(23, count);
+
+        // Ask it to get block 184, creating it if needed, which it will do
+ ministore.createBlockIfNeeded(184);
+
+ // The ministore should be one big block bigger now
+ it = new POIFSStream(fs, fs.getRoot().getProperty().getStartBlock()).getBlockIterator();
+ count = 0;
+ while(it.hasNext()) {
+ count++;
+ it.next();
+ }
+ assertEquals(24, count);
+
+        // The mini block count now runs to 191
+ for(int i=0; i<=191; i++) {
+ ministore.getBlockAt(i);
+ }
+ try {
+ ministore.getBlockAt(192);
+ fail("No block at 192");
+ } catch(IndexOutOfBoundsException e) {}
+
+
+ // Now try writing through to 192, check that the SBAT and blocks are there
+ byte[] data = new byte[15*64];
+ POIFSStream stream = new POIFSStream(ministore, 178);
+ stream.updateContents(data);
+
+ // Check now
+ assertEquals(179 , ministore.getNextBlock(178));
+ assertEquals(180 , ministore.getNextBlock(179));
+ assertEquals(181 , ministore.getNextBlock(180));
+ assertEquals(182 , ministore.getNextBlock(181));
+ assertEquals(183 , ministore.getNextBlock(182));
+ assertEquals(184 , ministore.getNextBlock(183));
+ assertEquals(185 , ministore.getNextBlock(184));
+ assertEquals(186 , ministore.getNextBlock(185));
+ assertEquals(187 , ministore.getNextBlock(186));
+ assertEquals(188 , ministore.getNextBlock(187));
+ assertEquals(189 , ministore.getNextBlock(188));
+ assertEquals(190 , ministore.getNextBlock(189));
+ assertEquals(191 , ministore.getNextBlock(190));
+ assertEquals(192 , ministore.getNextBlock(191));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(192));
+ for(int i=193; i<256; i++) {
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(i));
+ }
+
+ fs.close();
+ }
+
+ @Test
+ public void testCreateMiniStoreFirst() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem();
+ POIFSMiniStore ministore = fs.getMiniStore();
+ DocumentInputStream dis;
+ DocumentEntry entry;
+
+ // Initially has Properties + BAT but nothing else
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(2));
+ // Ministore has no blocks, so can't iterate until used
+ try {
+ ministore.getNextBlock(0);
+ } catch (IndexOutOfBoundsException e) {}
+
+ // Write a very small new document, will populate the ministore for us
+ byte[] data = new byte[8];
+ for (int i=0; i<data.length; i++) {
+ data[i] = (byte)(i+42);
+ }
+ fs.getRoot().createDocument("mini", new ByteArrayInputStream(data));
+
+ // Should now have a mini-fat and a mini-stream
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,fs.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(3));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(4));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(0));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(1));
+
+        // Re-fetch the mini store, and add a second document to it
+ ministore = fs.getMiniStore();
+ fs.getRoot().createDocument("mini2", new ByteArrayInputStream(data));
+
+ // Main unchanged, ministore has a second
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,fs.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(3));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(4));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(0));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(2));
+
+ // Check the data is unchanged and the right length
+ entry = (DocumentEntry)fs.getRoot().getEntry("mini");
+ assertEquals(data.length, entry.getSize());
+ byte[] rdata = new byte[data.length];
+ dis = new DocumentInputStream(entry);
+ IOUtils.readFully(dis, rdata);
+ assertArrayEquals(data, rdata);
+ dis.close();
+
+ entry = (DocumentEntry)fs.getRoot().getEntry("mini2");
+ assertEquals(data.length, entry.getSize());
+ rdata = new byte[data.length];
+ dis = new DocumentInputStream(entry);
+ IOUtils.readFully(dis, rdata);
+ assertArrayEquals(data, rdata);
+ dis.close();
+
+ // Done
+ fs.close();
+ }
+
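+    /**
+     * Illustrative helper, not part of the original test: the read-back
+     * pattern used above (look up the entry, size a buffer, read it fully),
+     * assuming a document with the given name exists in the root directory.
+     */
+    private static byte[] readRootDocument(POIFSFileSystem fs, String name) throws Exception {
+        DocumentEntry entry = (DocumentEntry) fs.getRoot().getEntry(name);
+        byte[] content = new byte[entry.getSize()];
+        try (DocumentInputStream dis = new DocumentInputStream(entry)) {
+            IOUtils.readFully(dis, content);
+        }
+        return content;
+    }
+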
+ @Test
+ public void testMultiBlockStream() throws Exception {
+ byte[] data1B = new byte[63];
+ byte[] data2B = new byte[64+14];
+ for (int i=0; i<data1B.length; i++) {
+ data1B[i] = (byte)(i+2);
+ }
+ for (int i=0; i<data2B.length; i++) {
+ data2B[i] = (byte)(i+4);
+ }
+
+ // New filesystem and store to use
+ POIFSFileSystem fs = new POIFSFileSystem();
+
+ // Initially has Properties + BAT but nothing else
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(2));
+
+ // Store the 2 block one, should use 2 mini blocks, and request
+ // the use of 2 big blocks
+ POIFSMiniStore ministore = fs.getMiniStore();
+ fs.getRoot().createDocument("mini2", new ByteArrayInputStream(data2B));
+
+ // Check
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2)); // SBAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(3)); // Mini
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(4));
+
+ // First 2 Mini blocks will be used
+ assertEquals(2, ministore.getFreeBlock());
+
+ // Add one more mini-stream, and check
+ fs.getRoot().createDocument("mini1", new ByteArrayInputStream(data1B));
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2)); // SBAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(3)); // Mini
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(4));
+
+ // One more mini-block will be used
+ assertEquals(3, ministore.getFreeBlock());
+
+ // Check the contents too
+ byte[] r1 = new byte[data1B.length];
+ DocumentInputStream dis = fs.createDocumentInputStream("mini1");
+ IOUtils.readFully(dis, r1);
+ dis.close();
+ assertArrayEquals(data1B, r1);
+
+ byte[] r2 = new byte[data2B.length];
+ dis = fs.createDocumentInputStream("mini2");
+ IOUtils.readFully(dis, r2);
+ dis.close();
+ assertArrayEquals(data2B, r2);
+ fs.close();
+ }
+}
--- /dev/null
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+package org.apache.poi.poifs.filesystem;
+
+import static org.hamcrest.core.IsCollectionContaining.hasItem;
+import static org.hamcrest.core.IsEqual.equalTo;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+
+import junit.framework.TestCase;
+import org.apache.poi.POIDataSamples;
+import org.apache.poi.hpsf.DocumentSummaryInformation;
+import org.apache.poi.hpsf.PropertySet;
+import org.apache.poi.hpsf.PropertySetFactory;
+import org.apache.poi.hpsf.SummaryInformation;
+import org.apache.poi.poifs.common.POIFSConstants;
+import org.apache.poi.poifs.property.DirectoryProperty;
+import org.apache.poi.poifs.property.Property;
+import org.apache.poi.poifs.property.PropertyTable;
+import org.apache.poi.poifs.property.RootProperty;
+import org.apache.poi.poifs.storage.BATBlock;
+import org.apache.poi.poifs.storage.HeaderBlock;
+import org.apache.poi.util.IOUtils;
+import org.apache.poi.util.TempFile;
+import org.junit.Assume;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Tests {@link POIFSStream}
+ */
+public final class TestPOIFSStream {
+ private static final POIDataSamples _inst = POIDataSamples.getPOIFSInstance();
+
+ /**
+ * Read a single block stream
+ */
+ @Test
+ public void testReadTinyStream() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+
+ // 98 is actually the last block in a two block stream...
+ POIFSStream stream = new POIFSStream(fs, 98);
+ Iterator<ByteBuffer> i = stream.getBlockIterator();
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ ByteBuffer b = i.next();
+ assertFalse(i.hasNext());
+ assertFalse(i.hasNext());
+ assertFalse(i.hasNext());
+
+ // Check the contents
+ assertEquals((byte)0x81, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x82, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+
+ fs.close();
+ }
+
+ /**
+ * Read a stream with only two blocks in it
+ */
+ @Test
+ public void testReadShortStream() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+
+ // 97 -> 98 -> end
+ POIFSStream stream = new POIFSStream(fs, 97);
+ Iterator<ByteBuffer> i = stream.getBlockIterator();
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ ByteBuffer b97 = i.next();
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ ByteBuffer b98 = i.next();
+ assertFalse(i.hasNext());
+ assertFalse(i.hasNext());
+ assertFalse(i.hasNext());
+
+ // Check the contents of the 1st block
+ assertEquals((byte)0x01, b97.get());
+ assertEquals((byte)0x00, b97.get());
+ assertEquals((byte)0x00, b97.get());
+ assertEquals((byte)0x00, b97.get());
+ assertEquals((byte)0x02, b97.get());
+ assertEquals((byte)0x00, b97.get());
+ assertEquals((byte)0x00, b97.get());
+ assertEquals((byte)0x00, b97.get());
+
+ // Check the contents of the 2nd block
+ assertEquals((byte)0x81, b98.get());
+ assertEquals((byte)0x00, b98.get());
+ assertEquals((byte)0x00, b98.get());
+ assertEquals((byte)0x00, b98.get());
+ assertEquals((byte)0x82, b98.get());
+ assertEquals((byte)0x00, b98.get());
+ assertEquals((byte)0x00, b98.get());
+ assertEquals((byte)0x00, b98.get());
+
+ fs.close();
+ }
+
+ /**
+ * Read a stream with many blocks
+ */
+ @Test
+ public void testReadLongerStream() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+
+ ByteBuffer b0 = null;
+ ByteBuffer b1 = null;
+ ByteBuffer b22 = null;
+
+ // The stream at 0 has 23 blocks in it
+ POIFSStream stream = new POIFSStream(fs, 0);
+ Iterator<ByteBuffer> i = stream.getBlockIterator();
+ int count = 0;
+ while(i.hasNext()) {
+ ByteBuffer b = i.next();
+ if(count == 0) {
+ b0 = b;
+ }
+ if(count == 1) {
+ b1 = b;
+ }
+ if(count == 22) {
+ b22 = b;
+ }
+
+ count++;
+ }
+ assertEquals(23, count);
+
+ // Check the contents
+ // 1st block is at 0
+ assertEquals((byte)0x9e, b0.get());
+ assertEquals((byte)0x75, b0.get());
+ assertEquals((byte)0x97, b0.get());
+ assertEquals((byte)0xf6, b0.get());
+
+ // 2nd block is at 1
+ assertEquals((byte)0x86, b1.get());
+ assertEquals((byte)0x09, b1.get());
+ assertEquals((byte)0x22, b1.get());
+ assertEquals((byte)0xfb, b1.get());
+
+ // last block is at 89
+ assertEquals((byte)0xfe, b22.get());
+ assertEquals((byte)0xff, b22.get());
+ assertEquals((byte)0x00, b22.get());
+ assertEquals((byte)0x00, b22.get());
+ assertEquals((byte)0x05, b22.get());
+ assertEquals((byte)0x01, b22.get());
+ assertEquals((byte)0x02, b22.get());
+ assertEquals((byte)0x00, b22.get());
+
+ fs.close();
+ }
+
+ /**
+ * Read a stream with several blocks in a 4096 byte block file
+ */
+ @Test
+ public void testReadStream4096() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
+
+ // 0 -> 1 -> 2 -> end
+ POIFSStream stream = new POIFSStream(fs, 0);
+ Iterator<ByteBuffer> i = stream.getBlockIterator();
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ ByteBuffer b0 = i.next();
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ ByteBuffer b1 = i.next();
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ ByteBuffer b2 = i.next();
+ assertFalse(i.hasNext());
+ assertFalse(i.hasNext());
+ assertFalse(i.hasNext());
+
+ // Check the contents of the 1st block
+ assertEquals((byte)0x9E, b0.get());
+ assertEquals((byte)0x75, b0.get());
+ assertEquals((byte)0x97, b0.get());
+ assertEquals((byte)0xF6, b0.get());
+ assertEquals((byte)0xFF, b0.get());
+ assertEquals((byte)0x21, b0.get());
+ assertEquals((byte)0xD2, b0.get());
+ assertEquals((byte)0x11, b0.get());
+
+ // Check the contents of the 2nd block
+ assertEquals((byte)0x00, b1.get());
+ assertEquals((byte)0x00, b1.get());
+ assertEquals((byte)0x03, b1.get());
+ assertEquals((byte)0x00, b1.get());
+ assertEquals((byte)0x00, b1.get());
+ assertEquals((byte)0x00, b1.get());
+ assertEquals((byte)0x00, b1.get());
+ assertEquals((byte)0x00, b1.get());
+
+ // Check the contents of the 3rd block
+ assertEquals((byte)0x6D, b2.get());
+ assertEquals((byte)0x00, b2.get());
+ assertEquals((byte)0x00, b2.get());
+ assertEquals((byte)0x00, b2.get());
+ assertEquals((byte)0x03, b2.get());
+ assertEquals((byte)0x00, b2.get());
+ assertEquals((byte)0x46, b2.get());
+ assertEquals((byte)0x00, b2.get());
+
+ fs.close();
+ }
+
+ /**
+ * Craft a nasty file with a loop, and ensure we don't get stuck
+ */
+ @Test
+ public void testReadFailsOnLoop() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+
+ // Hack the FAT so that it goes 0->1->2->0
+ fs.setNextBlock(0, 1);
+ fs.setNextBlock(1, 2);
+ fs.setNextBlock(2, 0);
+
+ // Now try to read
+ POIFSStream stream = new POIFSStream(fs, 0);
+ Iterator<ByteBuffer> i = stream.getBlockIterator();
+ assertTrue(i.hasNext());
+
+ // 1st read works
+ i.next();
+ assertTrue(i.hasNext());
+
+ // 2nd read works
+ i.next();
+ assertTrue(i.hasNext());
+
+ // 3rd read works
+ i.next();
+ assertTrue(i.hasNext());
+
+ // 4th read blows up as it loops back to 0
+ try {
+ i.next();
+ fail("Loop should have been detected but wasn't!");
+ } catch(RuntimeException e) {
+ // Good, it was detected
+ }
+ assertTrue(i.hasNext());
+
+ fs.close();
+ }
+
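+    /**
+     * Illustrative helper, not part of the original test: a loop-safe way to
+     * measure a FAT chain, bounding the walk so that a corrupt FAT like the
+     * 0->1->2->0 one above cannot spin forever. The maxBlocks limit is an
+     * assumption supplied by the caller.
+     */
+    private static int chainLengthWithLoopCheck(POIFSFileSystem fs, int startBlock, int maxBlocks) {
+        int count = 0;
+        int block = startBlock;
+        while (block != POIFSConstants.END_OF_CHAIN) {
+            if (++count > maxBlocks) {
+                throw new IllegalStateException("Loop detected in FAT chain starting at " + startBlock);
+            }
+            block = fs.getNextBlock(block);
+        }
+        return count;
+    }
+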
+ /**
+ * Tests that we can load some streams that are
+ * stored in the mini stream.
+ */
+ @Test
+ public void testReadMiniStreams() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ POIFSMiniStore ministore = fs.getMiniStore();
+
+ // 178 -> 179 -> 180 -> end
+ POIFSStream stream = new POIFSStream(ministore, 178);
+ Iterator<ByteBuffer> i = stream.getBlockIterator();
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ ByteBuffer b178 = i.next();
+ assertTrue(i.hasNext());
+ assertTrue(i.hasNext());
+ ByteBuffer b179 = i.next();
+ assertTrue(i.hasNext());
+ ByteBuffer b180 = i.next();
+ assertFalse(i.hasNext());
+ assertFalse(i.hasNext());
+ assertFalse(i.hasNext());
+
+ // Check the contents of the 1st block
+ assertEquals((byte)0xfe, b178.get());
+ assertEquals((byte)0xff, b178.get());
+ assertEquals((byte)0x00, b178.get());
+ assertEquals((byte)0x00, b178.get());
+ assertEquals((byte)0x05, b178.get());
+ assertEquals((byte)0x01, b178.get());
+ assertEquals((byte)0x02, b178.get());
+ assertEquals((byte)0x00, b178.get());
+
+ // And the 2nd
+ assertEquals((byte)0x6c, b179.get());
+ assertEquals((byte)0x00, b179.get());
+ assertEquals((byte)0x00, b179.get());
+ assertEquals((byte)0x00, b179.get());
+ assertEquals((byte)0x28, b179.get());
+ assertEquals((byte)0x00, b179.get());
+ assertEquals((byte)0x00, b179.get());
+ assertEquals((byte)0x00, b179.get());
+
+ // And the 3rd
+ assertEquals((byte)0x30, b180.get());
+ assertEquals((byte)0x00, b180.get());
+ assertEquals((byte)0x00, b180.get());
+ assertEquals((byte)0x00, b180.get());
+ assertEquals((byte)0x00, b180.get());
+ assertEquals((byte)0x00, b180.get());
+ assertEquals((byte)0x00, b180.get());
+ assertEquals((byte)0x80, b180.get());
+
+ fs.close();
+ }
+
+ /**
+ * Writing the same amount of data as before
+ */
+ @Test
+ public void testReplaceStream() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+
+ byte[] data = new byte[512];
+ for(int i=0; i<data.length; i++) {
+ data[i] = (byte)(i%256);
+ }
+
+ // 98 is actually the last block in a two block stream...
+ POIFSStream stream = new POIFSStream(fs, 98);
+ stream.updateContents(data);
+
+ // Check the reading of blocks
+ Iterator<ByteBuffer> it = stream.getBlockIterator();
+ assertTrue(it.hasNext());
+ ByteBuffer b = it.next();
+ assertFalse(it.hasNext());
+
+ // Now check the contents
+ data = new byte[512];
+ b.get(data);
+ for(int i=0; i<data.length; i++) {
+ byte exp = (byte)(i%256);
+ assertEquals(exp, data[i]);
+ }
+
+ fs.close();
+ }
+
+ /**
+ * Writes less data than before, so some blocks will need
+ * to be freed
+ */
+ @Test
+ public void testReplaceStreamWithLess() throws Exception {
+ try (InputStream is = _inst.openResourceAsStream("BlockSize512.zvi");
+ POIFSFileSystem fs = new POIFSFileSystem(is)) {
+
+ byte[] data = new byte[512];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = (byte) (i % 256);
+ }
+
+ // 97 -> 98 -> end
+ assertEquals(98, fs.getNextBlock(97));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(98));
+
+ // Create a 2 block stream, will become a 1 block one
+ POIFSStream stream = new POIFSStream(fs, 97);
+ stream.updateContents(data);
+
+ // 97 should now be the end, and 98 free
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(97));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(98));
+
+ // Check the reading of blocks
+ Iterator<ByteBuffer> it = stream.getBlockIterator();
+ assertTrue(it.hasNext());
+ ByteBuffer b = it.next();
+ assertFalse(it.hasNext());
+
+ // Now check the contents
+ data = new byte[512];
+ b.get(data);
+ for (int i = 0; i < data.length; i++) {
+ byte exp = (byte) (i % 256);
+ assertEquals(exp, data[i]);
+ }
+ }
+ }
+
+ /**
+ * Writes more data than before, so new blocks will be needed
+ */
+ @Test
+ public void testReplaceStreamWithMore() throws Exception {
+ try (InputStream is = _inst.openResourceAsStream("BlockSize512.zvi");
+ POIFSFileSystem fs = new POIFSFileSystem(is)) {
+
+ byte[] data = new byte[512 * 3];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = (byte) (i % 256);
+ }
+
+ // 97 -> 98 -> end
+ assertEquals(98, fs.getNextBlock(97));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(98));
+
+ // 100 is our first free one
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
+
+ // Create a 2 block stream, will become a 3 block one
+ POIFSStream stream = new POIFSStream(fs, 97);
+ stream.updateContents(data);
+
+ // 97 -> 98 -> 100 -> end
+ assertEquals(98, fs.getNextBlock(97));
+ assertEquals(100, fs.getNextBlock(98));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(100));
+
+ // Check the reading of blocks
+ Iterator<ByteBuffer> it = stream.getBlockIterator();
+ int count = 0;
+ while (it.hasNext()) {
+ ByteBuffer b = it.next();
+ data = new byte[512];
+ b.get(data);
+ for (int i = 0; i < data.length; i++) {
+ byte exp = (byte) (i % 256);
+ assertEquals(exp, data[i]);
+ }
+ count++;
+ }
+ assertEquals(3, count);
+ }
+ }
+
+ /**
+ * Writes to a new stream in the file
+ */
+ @Test
+ public void testWriteNewStream() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+
+ // 100 is our first free one
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(101));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(102));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(103));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(104));
+
+
+ // Add a single block one
+ byte[] data = new byte[512];
+ for(int i=0; i<data.length; i++) {
+ data[i] = (byte)(i%256);
+ }
+
+ POIFSStream stream = new POIFSStream(fs);
+ stream.updateContents(data);
+
+ // Check it was allocated properly
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(100));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(101));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(102));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(103));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(104));
+
+ // And check the contents
+ Iterator<ByteBuffer> it = stream.getBlockIterator();
+ int count = 0;
+ while(it.hasNext()) {
+ ByteBuffer b = it.next();
+ data = new byte[512];
+ b.get(data);
+ for(int i=0; i<data.length; i++) {
+ byte exp = (byte)(i%256);
+ assertEquals(exp, data[i]);
+ }
+ count++;
+ }
+ assertEquals(1, count);
+
+
+ // And a multi block one
+ data = new byte[512*3];
+ for(int i=0; i<data.length; i++) {
+ data[i] = (byte)(i%256);
+ }
+
+ stream = new POIFSStream(fs);
+ stream.updateContents(data);
+
+ // Check it was allocated properly
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(100));
+ assertEquals(102, fs.getNextBlock(101));
+ assertEquals(103, fs.getNextBlock(102));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(103));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(104));
+
+ // And check the contents
+ it = stream.getBlockIterator();
+ count = 0;
+ while(it.hasNext()) {
+ ByteBuffer b = it.next();
+ data = new byte[512];
+ b.get(data);
+ for(int i=0; i<data.length; i++) {
+ byte exp = (byte)(i%256);
+ assertEquals(exp, data[i]);
+ }
+ count++;
+ }
+ assertEquals(3, count);
+
+ // Free it
+ stream.free();
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(100));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(101));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(102));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(103));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(104));
+
+ fs.close();
+ }
+
+ /**
+ * Writes to a new stream in the file where there aren't enough
+ * free blocks, so new FAT segments will need to be allocated
+ * to support this
+ */
+ @Test
+ public void testWriteNewStreamExtraFATs() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+
+ // Allocate almost all the blocks
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(127));
+ for(int i=100; i<127; i++) {
+ fs.setNextBlock(i, POIFSConstants.END_OF_CHAIN);
+ }
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(127));
+ assertTrue(fs.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+
+
+ // Write a 3 block stream
+ byte[] data = new byte[512*3];
+ for(int i=0; i<data.length; i++) {
+ data[i] = (byte)(i%256);
+ }
+ POIFSStream stream = new POIFSStream(fs);
+ stream.updateContents(data);
+
+ // Check we got another BAT
+ assertFalse(fs.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+ assertTrue(fs.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
+
+ // The new BAT is allocated in the first free spot (block 128)
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(126));
+ assertEquals(129, fs.getNextBlock(127));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(128));
+ assertEquals(130, fs.getNextBlock(129));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(130));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(131));
+
+ fs.close();
+ }
+
+ /**
+ * Replaces data in an existing stream, with a bit
+ * more data than before, in a 4096 byte block file
+ */
+ @Test
+ public void testWriteStream4096() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
+
+ // 0 -> 1 -> 2 -> end
+ assertEquals(1, fs.getNextBlock(0));
+ assertEquals(2, fs.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
+ assertEquals(4, fs.getNextBlock(3));
+
+ // First free one is at 15
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(14));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(15));
+
+
+ // Write a 5 block file
+ byte[] data = new byte[4096*5];
+ for(int i=0; i<data.length; i++) {
+ data[i] = (byte)(i%256);
+ }
+ POIFSStream stream = new POIFSStream(fs, 0);
+ stream.updateContents(data);
+
+
+ // Check it
+ assertEquals(1, fs.getNextBlock(0));
+ assertEquals(2, fs.getNextBlock(1));
+ assertEquals(15, fs.getNextBlock(2)); // Jumps
+ assertEquals(4, fs.getNextBlock(3)); // Next stream
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(14));
+ assertEquals(16, fs.getNextBlock(15)); // Continues
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(16)); // Ends
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(17)); // Free
+
+ // Check the contents too
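+ // (note: this only verifies the first 512 bytes of each 4096 byte block)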
+ Iterator<ByteBuffer> it = stream.getBlockIterator();
+ int count = 0;
+ while(it.hasNext()) {
+ ByteBuffer b = it.next();
+ data = new byte[512];
+ b.get(data);
+ for(int i=0; i<data.length; i++) {
+ byte exp = (byte)(i%256);
+ assertEquals(exp, data[i]);
+ }
+ count++;
+ }
+ assertEquals(5, count);
+
+ fs.close();
+ }
+
+ /**
+ * Tests that we can write into the mini stream
+ */
+ @Test
+ public void testWriteMiniStreams() throws Exception {
+ try (InputStream is = _inst.openResourceAsStream("BlockSize512.zvi");
+ POIFSFileSystem fs = new POIFSFileSystem(is)) {
+
+ POIFSMiniStore ministore = fs.getMiniStore();
+
+ // 178 -> 179 -> 180 -> end
+ assertEquals(179, ministore.getNextBlock(178));
+ assertEquals(180, ministore.getNextBlock(179));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
+
+
+ // Try writing 3 full blocks' worth of data
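+ // (mini stream blocks are 64 bytes each, so 64*3 fills exactly 3 of them)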
+ byte[] data = new byte[64 * 3];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = (byte) i;
+ }
+ POIFSStream stream = new POIFSStream(ministore, 178);
+ stream.updateContents(data);
+
+ // Check
+ assertEquals(179, ministore.getNextBlock(178));
+ assertEquals(180, ministore.getNextBlock(179));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
+
+ stream = new POIFSStream(ministore, 178);
+ Iterator<ByteBuffer> it = stream.getBlockIterator();
+ ByteBuffer b178 = it.next();
+ ByteBuffer b179 = it.next();
+ ByteBuffer b180 = it.next();
+ assertFalse(it.hasNext());
+
+ assertEquals((byte) 0x00, b178.get());
+ assertEquals((byte) 0x01, b178.get());
+ assertEquals((byte) 0x40, b179.get());
+ assertEquals((byte) 0x41, b179.get());
+ assertEquals((byte) 0x80, b180.get());
+ assertEquals((byte) 0x81, b180.get());
+
+
+ // Try writing just over 2 blocks' worth, still spanning 3 blocks
+ data = new byte[64 * 2 + 12];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = (byte) (i + 4);
+ }
+ stream = new POIFSStream(ministore, 178);
+ stream.updateContents(data);
+
+ // Check
+ assertEquals(179, ministore.getNextBlock(178));
+ assertEquals(180, ministore.getNextBlock(179));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(180));
+
+ stream = new POIFSStream(ministore, 178);
+ it = stream.getBlockIterator();
+ b178 = it.next();
+ b179 = it.next();
+ b180 = it.next();
+ assertFalse(it.hasNext());
+
+ assertEquals((byte) 0x04, b178.get(0));
+ assertEquals((byte) 0x05, b178.get(1));
+ assertEquals((byte) 0x44, b179.get(0));
+ assertEquals((byte) 0x45, b179.get(1));
+ assertEquals((byte) 0x84, b180.get(0));
+ assertEquals((byte) 0x85, b180.get(1));
+
+
+ // Try writing just 1 block's worth, which should truncate the chain
+ data = new byte[12];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = (byte) (i + 9);
+ }
+ stream = new POIFSStream(ministore, 178);
+ stream.updateContents(data);
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(178));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(179));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(180));
+
+ stream = new POIFSStream(ministore, 178);
+ it = stream.getBlockIterator();
+ b178 = it.next();
+ assertFalse(it.hasNext());
+
+ assertEquals((byte) 0x09, b178.get(0));
+ assertEquals((byte) 0x0a, b178.get(1));
+
+
+ // Try writing 5 blocks' worth, which should extend the chain
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(178));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(179));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(180));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(181));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(182));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(183));
+
+ data = new byte[64 * 4 + 12];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = (byte) (i + 3);
+ }
+ stream = new POIFSStream(ministore, 178);
+ stream.updateContents(data);
+
+ assertEquals(179, ministore.getNextBlock(178));
+ assertEquals(180, ministore.getNextBlock(179));
+ assertEquals(181, ministore.getNextBlock(180));
+ assertEquals(182, ministore.getNextBlock(181));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(182));
+
+ stream = new POIFSStream(ministore, 178);
+ it = stream.getBlockIterator();
+ b178 = it.next();
+ b179 = it.next();
+ b180 = it.next();
+ ByteBuffer b181 = it.next();
+ ByteBuffer b182 = it.next();
+ assertFalse(it.hasNext());
+
+ assertEquals((byte) 0x03, b178.get(0));
+ assertEquals((byte) 0x04, b178.get(1));
+ assertEquals((byte) 0x43, b179.get(0));
+ assertEquals((byte) 0x44, b179.get(1));
+ assertEquals((byte) 0x83, b180.get(0));
+ assertEquals((byte) 0x84, b180.get(1));
+ assertEquals((byte) 0xc3, b181.get(0));
+ assertEquals((byte) 0xc4, b181.get(1));
+ assertEquals((byte) 0x03, b182.get(0));
+ assertEquals((byte) 0x04, b182.get(1));
+
+
+ // Write lots, so it needs another big block
+ ministore.getBlockAt(183);
+ try {
+ ministore.getBlockAt(184);
+ fail("Block 184 should be off the end of the list");
+ } catch (IndexOutOfBoundsException e) {
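+ // expected here - block 184 is currently past the end of the mini FAT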
+ }
+
+ data = new byte[64 * 6 + 12];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = (byte) (i + 1);
+ }
+ stream = new POIFSStream(ministore, 178);
+ stream.updateContents(data);
+
+ // Should have added 2 more blocks to the chain
+ assertEquals(179, ministore.getNextBlock(178));
+ assertEquals(180, ministore.getNextBlock(179));
+ assertEquals(181, ministore.getNextBlock(180));
+ assertEquals(182, ministore.getNextBlock(181));
+ assertEquals(183, ministore.getNextBlock(182));
+ assertEquals(184, ministore.getNextBlock(183));
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(184));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, ministore.getNextBlock(185));
+
+ // Blocks 183 to 185 should now exist
+ ministore.getBlockAt(183);
+ ministore.getBlockAt(184);
+ ministore.getBlockAt(185);
+
+ // Check contents
+ stream = new POIFSStream(ministore, 178);
+ it = stream.getBlockIterator();
+ b178 = it.next();
+ b179 = it.next();
+ b180 = it.next();
+ b181 = it.next();
+ b182 = it.next();
+ ByteBuffer b183 = it.next();
+ ByteBuffer b184 = it.next();
+ assertFalse(it.hasNext());
+
+ assertEquals((byte) 0x01, b178.get(0));
+ assertEquals((byte) 0x02, b178.get(1));
+ assertEquals((byte) 0x41, b179.get(0));
+ assertEquals((byte) 0x42, b179.get(1));
+ assertEquals((byte) 0x81, b180.get(0));
+ assertEquals((byte) 0x82, b180.get(1));
+ assertEquals((byte) 0xc1, b181.get(0));
+ assertEquals((byte) 0xc2, b181.get(1));
+ assertEquals((byte) 0x01, b182.get(0));
+ assertEquals((byte) 0x02, b182.get(1));
+ assertEquals((byte) 0x41, b183.get(0));
+ assertEquals((byte) 0x42, b183.get(1));
+ assertEquals((byte) 0x81, b184.get(0));
+ assertEquals((byte) 0x82, b184.get(1));
+
+ }
+ }
+
+ /**
+ * Craft a nasty file with a loop, and ensure we don't get stuck
+ */
+ @Test
+ public void testWriteFailsOnLoop() throws Exception {
+ try (POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"))) {
+
+ // Hack the FAT so that it goes 0->1->2->0
+ fs.setNextBlock(0, 1);
+ fs.setNextBlock(1, 2);
+ fs.setNextBlock(2, 0);
+
+ // Try to write a large amount, should fail on the write
+ byte[] data = new byte[512 * 4];
+ POIFSStream stream = new POIFSStream(fs, 0);
+ try {
+ stream.updateContents(data);
+ fail("Loop should have been detected but wasn't!");
+ } catch (IllegalStateException e) {
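+ // expected here - the loop in the FAT chain was detected during the write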
+ }
+
+ // Now reset, and try with a small amount of data
+ // Should fail while freeing the old surplus blocks
+ fs.setNextBlock(0, 1);
+ fs.setNextBlock(1, 2);
+ fs.setNextBlock(2, 0);
+
+ data = new byte[512];
+ stream = new POIFSStream(fs, 0);
+ try {
+ stream.updateContents(data);
+ fail("Loop should have been detected but wasn't!");
+ } catch (IllegalStateException e) {
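+ // expected here - the loop is detected while freeing the old blocks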
+ }
+ }
+ }
+
+ /**
+ * Tests adding a new stream, writing and reading it.
+ */
+ @Test
+ public void testReadWriteNewStream() throws Exception {
+ try (POIFSFileSystem fs = new POIFSFileSystem()) {
+ POIFSStream stream = new POIFSStream(fs);
+
+ // Check our filesystem has Properties then BAT
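+ // (block 0 holds the property table, block 1 is the BAT itself, so block 2 is the first free block)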
+ assertEquals(2, fs.getFreeBlock());
+ BATBlock bat = fs.getBATBlockAndIndex(0).getBlock();
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, bat.getValueAt(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(2));
+
+ // Check the stream as-is
+ assertEquals(POIFSConstants.END_OF_CHAIN, stream.getStartBlock());
+ try {
+ stream.getBlockIterator();
+ fail("Shouldn't be able to get an iterator before writing");
+ } catch (IllegalStateException e) {
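+ // expected here - no blocks have been written to the stream yet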
+ }
+
+ // Write in two blocks
+ byte[] data = new byte[512 + 20];
+ for (int i = 0; i < 512; i++) {
+ data[i] = (byte) (i % 256);
+ }
+ for (int i = 512; i < data.length; i++) {
+ data[i] = (byte) (i % 256 + 100);
+ }
+ stream.updateContents(data);
+
+ // Check now
+ assertEquals(4, fs.getFreeBlock());
+ bat = fs.getBATBlockAndIndex(0).getBlock();
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, bat.getValueAt(1));
+ assertEquals(3, bat.getValueAt(2));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(3));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(4));
+
+
+ Iterator<ByteBuffer> it = stream.getBlockIterator();
+ assertTrue(it.hasNext());
+ ByteBuffer b = it.next();
+
+ byte[] read = new byte[512];
+ b.get(read);
+ for (int i = 0; i < read.length; i++) {
+ assertEquals("Wrong value at " + i, data[i], read[i]);
+ }
+
+ assertTrue(it.hasNext());
+ b = it.next();
+
+ read = new byte[512];
+ b.get(read);
+ for (int i = 0; i < 20; i++) {
+ assertEquals(data[i + 512], read[i]);
+ }
+ for (int i = 20; i < read.length; i++) {
+ assertEquals(0, read[i]);
+ }
+
+ assertFalse(it.hasNext());
+ }
+ }
+
+ /**
+ * Writes a stream, then replaces it
+ */
+ @Test
+ public void testWriteThenReplace() throws Exception {
+ POIFSFileSystem fs = new POIFSFileSystem();
+
+ // Starts empty, other than Properties and BAT
+ BATBlock bat = fs.getBATBlockAndIndex(0).getBlock();
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(2));
+
+ // Write something that uses a main stream
+ byte[] main4106 = new byte[4106];
+ main4106[0] = -10;
+ main4106[4105] = -11;
+ fs.getRoot().createDocument("Normal", new ByteArrayInputStream(main4106));
+
+ // Should have used 9 blocks
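+ // (4106 bytes needs 9 x 512 byte blocks, chained from block 2 through 10)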
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
+ assertEquals(3, bat.getValueAt(2));
+ assertEquals(4, bat.getValueAt(3));
+ assertEquals(5, bat.getValueAt(4));
+ assertEquals(6, bat.getValueAt(5));
+ assertEquals(7, bat.getValueAt(6));
+ assertEquals(8, bat.getValueAt(7));
+ assertEquals(9, bat.getValueAt(8));
+ assertEquals(10, bat.getValueAt(9));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(10));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
+
+ DocumentEntry normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ assertEquals(4106, normal.getSize());
+ assertEquals(4106, ((DocumentNode)normal).getProperty().getSize());
+
+
+ // Replace with one still big enough for a main stream, but one block smaller
+ byte[] main4096 = new byte[4096];
+ main4096[0] = -10;
+ main4096[4095] = -11;
+
+ DocumentOutputStream nout = new DocumentOutputStream(normal);
+ nout.write(main4096);
+ nout.close();
+
+ // Will have dropped to 8
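+ // (4096 bytes fits exactly into 8 x 512 byte blocks, 2 through 9)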
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
+ assertEquals(3, bat.getValueAt(2));
+ assertEquals(4, bat.getValueAt(3));
+ assertEquals(5, bat.getValueAt(4));
+ assertEquals(6, bat.getValueAt(5));
+ assertEquals(7, bat.getValueAt(6));
+ assertEquals(8, bat.getValueAt(7));
+ assertEquals(9, bat.getValueAt(8));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(9));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(10));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
+
+ normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ assertEquals(4096, normal.getSize());
+ assertEquals(4096, ((DocumentNode)normal).getProperty().getSize());
+
+
+ // Write and check
+ fs = writeOutAndReadBack(fs);
+ bat = fs.getBATBlockAndIndex(0).getBlock();
+
+ // No change after write
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0)); // Properties
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
+ assertEquals(3, bat.getValueAt(2));
+ assertEquals(4, bat.getValueAt(3));
+ assertEquals(5, bat.getValueAt(4));
+ assertEquals(6, bat.getValueAt(5));
+ assertEquals(7, bat.getValueAt(6));
+ assertEquals(8, bat.getValueAt(7));
+ assertEquals(9, bat.getValueAt(8));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(9)); // End of Normal
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(10));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
+
+ normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ assertEquals(4096, normal.getSize());
+ assertEquals(4096, ((DocumentNode)normal).getProperty().getSize());
+
+
+ // Make it longer, taking 1 more block at the end
+ normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ nout = new DocumentOutputStream(normal);
+ nout.write(main4106);
+ nout.close();
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK,bat.getValueAt(1));
+ assertEquals(3, bat.getValueAt(2));
+ assertEquals(4, bat.getValueAt(3));
+ assertEquals(5, bat.getValueAt(4));
+ assertEquals(6, bat.getValueAt(5));
+ assertEquals(7, bat.getValueAt(6));
+ assertEquals(8, bat.getValueAt(7));
+ assertEquals(9, bat.getValueAt(8));
+ assertEquals(10, bat.getValueAt(9));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(10)); // Normal
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(12));
+
+ normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ assertEquals(4106, normal.getSize());
+ assertEquals(4106, ((DocumentNode)normal).getProperty().getSize());
+
+
+ // Make it small, which will trigger the SBAT stream and free lots of blocks up
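+ // (streams under 4096 bytes are stored in the mini stream, in 64 byte mini blocks)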
+ byte[] mini = new byte[] { 42, 0, 1, 2, 3, 4, 42 };
+ normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ nout = new DocumentOutputStream(normal);
+ nout.write(mini);
+ nout.close();
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, bat.getValueAt(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(2)); // SBAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(3)); // Mini Stream
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(4));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(5));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(6));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(7));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(8));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(9));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(10));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(11));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(12));
+
+ normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ assertEquals(7, normal.getSize());
+ assertEquals(7, ((DocumentNode)normal).getProperty().getSize());
+
+
+ // Finally back to big again
+ nout = new DocumentOutputStream(normal);
+ nout.write(main4096);
+ nout.close();
+
+ // Will keep the mini stream, now empty
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, bat.getValueAt(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(2)); // SBAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(3)); // Mini Stream
+ assertEquals(5, bat.getValueAt(4));
+ assertEquals(6, bat.getValueAt(5));
+ assertEquals(7, bat.getValueAt(6));
+ assertEquals(8, bat.getValueAt(7));
+ assertEquals(9, bat.getValueAt(8));
+ assertEquals(10, bat.getValueAt(9));
+ assertEquals(11, bat.getValueAt(10));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(11));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(12));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(13));
+
+ normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ assertEquals(4096, normal.getSize());
+ assertEquals(4096, ((DocumentNode)normal).getProperty().getSize());
+
+
+ // Save, re-load, re-check
+ fs = writeOutAndReadBack(fs);
+ bat = fs.getBATBlockAndIndex(0).getBlock();
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, bat.getValueAt(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(2)); // SBAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(3)); // Mini Stream
+ assertEquals(5, bat.getValueAt(4));
+ assertEquals(6, bat.getValueAt(5));
+ assertEquals(7, bat.getValueAt(6));
+ assertEquals(8, bat.getValueAt(7));
+ assertEquals(9, bat.getValueAt(8));
+ assertEquals(10, bat.getValueAt(9));
+ assertEquals(11, bat.getValueAt(10));
+ assertEquals(POIFSConstants.END_OF_CHAIN, bat.getValueAt(11));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(12));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, bat.getValueAt(13));
+
+ normal = (DocumentEntry)fs.getRoot().getEntry("Normal");
+ assertEquals(4096, normal.getSize());
+ assertEquals(4096, ((DocumentNode)normal).getProperty().getSize());
+
+ fs.close();
+ }
+
+
+ /**
+ * Returns test files with 512 byte and 4k block sizes, loaded
+ * both from InputStreams and Files
+ */
+ private POIFSFileSystem[] get512and4kFileAndInput() throws IOException {
+ POIFSFileSystem fsA = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+ POIFSFileSystem fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ POIFSFileSystem fsC = new POIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
+ POIFSFileSystem fsD = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
+ return new POIFSFileSystem[] {fsA,fsB,fsC,fsD};
+ }
+
+ private static void assertBATCount(POIFSFileSystem fs, int expectedBAT, int expectedXBAT) throws IOException {
+ int foundBAT = 0;
+ int foundXBAT = 0;
+ int sz = (int)(fs.size() / fs.getBigBlockSize());
+ for (int i=0; i<sz; i++) {
+ if(fs.getNextBlock(i) == POIFSConstants.FAT_SECTOR_BLOCK) {
+ foundBAT++;
+ }
+ if(fs.getNextBlock(i) == POIFSConstants.DIFAT_SECTOR_BLOCK) {
+ foundXBAT++;
+ }
+ }
+ assertEquals("Wrong number of BATs", expectedBAT, foundBAT);
+ assertEquals("Wrong number of XBATs with " + expectedBAT + " BATs", expectedXBAT, foundXBAT);
+ }
+ private void assertContentsMatches(byte[] expected, DocumentEntry doc) throws IOException {
+ DocumentInputStream inp = new DocumentInputStream(doc);
+ byte[] contents = new byte[doc.getSize()];
+ assertEquals(doc.getSize(), inp.read(contents));
+ inp.close();
+
+ if (expected != null) {
+ assertThat(expected, equalTo(contents));
+ }
+ }
+
+ private static HeaderBlock writeOutAndReadHeader(POIFSFileSystem fs) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ fs.writeFilesystem(baos);
+
+ return new HeaderBlock(new ByteArrayInputStream(baos.toByteArray()));
+ }
+
+ private static POIFSFileSystem writeOutAndReadBack(POIFSFileSystem original) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ original.writeFilesystem(baos);
+ return new POIFSFileSystem(new ByteArrayInputStream(baos.toByteArray()));
+ }
+
+ private static POIFSFileSystem writeOutFileAndReadBack(POIFSFileSystem original) throws IOException {
+ final File file = TempFile.createTempFile("TestPOIFS", ".ole2");
+ try (OutputStream fout = new FileOutputStream(file)) {
+ original.writeFilesystem(fout);
+ }
+ return new POIFSFileSystem(file, false);
+ }
+
+ @Test
+ public void basicOpen() throws IOException {
+ POIFSFileSystem fsA, fsB;
+
+ // With a simple 512 block file
+ fsA = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+ fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB}) {
+ assertEquals(512, fs.getBigBlockSize());
+ }
+ fsA.close();
+ fsB.close();
+
+ // Now with a simple 4096 block file
+ fsA = new POIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
+ fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB}) {
+ assertEquals(4096, fs.getBigBlockSize());
+ }
+ fsA.close();
+ fsB.close();
+ }
+
+ @Test
+ public void propertiesAndFatOnRead() throws IOException {
+ POIFSFileSystem fsA, fsB;
+
+ // With a simple 512 block file
+ fsA = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+ fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB}) {
+ // Check the FAT was properly processed:
+ // Verify we only got one BAT block
+ fs.getBATBlockAndIndex(0);
+ fs.getBATBlockAndIndex(1);
+ try {
+ fs.getBATBlockAndIndex(140);
+ fail("Should only be one BAT, but a 2nd was found");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+
+ // Verify a few next offsets
+ // 97 -> 98 -> END
+ assertEquals(98, fs.getNextBlock(97));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(98));
+
+
+ // Check the properties
+ PropertyTable props = fs._get_property_table();
+ assertEquals(90, props.getStartBlock());
+ assertEquals(7, props.countBlocks());
+
+ // Root property tells us about the Mini Stream
+ RootProperty root = props.getRoot();
+ assertEquals("Root Entry", root.getName());
+ assertEquals(11564, root.getSize());
+ assertEquals(0, root.getStartBlock());
+
+ // Check its children too
+ Property prop;
+ Iterator<Property> pi = root.getChildren();
+ prop = pi.next();
+ assertEquals("Thumbnail", prop.getName());
+ prop = pi.next();
+ assertEquals("\u0005DocumentSummaryInformation", prop.getName());
+ prop = pi.next();
+ assertEquals("\u0005SummaryInformation", prop.getName());
+ prop = pi.next();
+ assertEquals("Image", prop.getName());
+ prop = pi.next();
+ assertEquals("Tags", prop.getName());
+ assertFalse(pi.hasNext());
+
+
+ // Check the SBAT (Small Blocks FAT) was properly processed
+ POIFSMiniStore ministore = fs.getMiniStore();
+
+ // Verify we only got two SBAT blocks
+ ministore.getBATBlockAndIndex(0);
+ ministore.getBATBlockAndIndex(128);
+ try {
+ ministore.getBATBlockAndIndex(256);
+ fail("Should only be two SBATs, but a 3rd was found");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+
+ // Verify a few offsets: 0->50 is a stream
+ for(int i=0; i<50; i++) {
+ assertEquals(i+1, ministore.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(50));
+
+ fs.close();
+ }
+
+ // Now with a simple 4096 block file
+ fsA = new POIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
+ fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB}) {
+ // Check the FAT was properly processed
+ // Verify we only got one BAT block
+ fs.getBATBlockAndIndex(0);
+ fs.getBATBlockAndIndex(1);
+ try {
+ fs.getBATBlockAndIndex(1040);
+ fail("Should only be one BAT, but a 2nd was found");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+
+ // Verify a few next offsets
+ // 0 -> 1 -> 2 -> END
+ assertEquals(1, fs.getNextBlock(0));
+ assertEquals(2, fs.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
+
+
+ // Check the properties
+ PropertyTable props = fs._get_property_table();
+ assertEquals(12, props.getStartBlock());
+ assertEquals(1, props.countBlocks());
+
+ // Root property tells us about the Mini Stream
+ RootProperty root = props.getRoot();
+ assertEquals("Root Entry", root.getName());
+ assertEquals(11564, root.getSize());
+ assertEquals(0, root.getStartBlock());
+
+ // Check its children too
+ Property prop;
+ Iterator<Property> pi = root.getChildren();
+ prop = pi.next();
+ assertEquals("Thumbnail", prop.getName());
+ prop = pi.next();
+ assertEquals("\u0005DocumentSummaryInformation", prop.getName());
+ prop = pi.next();
+ assertEquals("\u0005SummaryInformation", prop.getName());
+ prop = pi.next();
+ assertEquals("Image", prop.getName());
+ prop = pi.next();
+ assertEquals("Tags", prop.getName());
+ assertFalse(pi.hasNext());
+
+
+ // Check the SBAT (Small Blocks FAT) was properly processed
+ POIFSMiniStore ministore = fs.getMiniStore();
+
+ // Verify we only got one SBAT block
+ ministore.getBATBlockAndIndex(0);
+ ministore.getBATBlockAndIndex(128);
+ ministore.getBATBlockAndIndex(1023);
+ try {
+ ministore.getBATBlockAndIndex(1024);
+ fail("Should only be one SBAT, but a 2nd was found");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+
+ // Verify a few offsets: 0->50 is a stream
+ for(int i=0; i<50; i++) {
+ assertEquals(i+1, ministore.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, ministore.getNextBlock(50));
+
+ fs.close();
+ }
+ }
+
+ /**
+ * Check that for a given block, we can correctly figure
+ * out what the next one is
+ */
+ @Test
+ public void nextBlock() throws IOException {
+ POIFSFileSystem fsA = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+ POIFSFileSystem fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB}) {
+ // 0 -> 21 are simple
+ for(int i=0; i<21; i++) {
+ assertEquals(i+1, fs.getNextBlock(i));
+ }
+ // 21 jumps to 89, then ends
+ assertEquals(89, fs.getNextBlock(21));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(89));
+
+ // 22 -> 88 simple sequential stream
+ for(int i=22; i<88; i++) {
+ assertEquals(i+1, fs.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(88));
+
+ // 90 -> 96 is another stream
+ for(int i=90; i<96; i++) {
+ assertEquals(i+1, fs.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(96));
+
+ // 97+98 is another
+ assertEquals(98, fs.getNextBlock(97));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(98));
+
+ // 99 is our FAT block
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs.getNextBlock(99));
+
+ // 100 onwards is free
+ for(int i=100; i<fs.getBigBlockSizeDetails().getBATEntriesPerBlock(); i++) {
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(i));
+ }
+
+ fs.close();
+ }
+
+ // Quick check on 4096 byte blocks too
+ fsA = new POIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
+ fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB}) {
+ // 0 -> 1 -> 2 -> end
+ assertEquals(1, fs.getNextBlock(0));
+ assertEquals(2, fs.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(2));
+
+ // 4 -> 11 then end
+ for(int i=4; i<11; i++) {
+ assertEquals(i+1, fs.getNextBlock(i));
+ }
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs.getNextBlock(11));
+
+ fs.close();
+ }
+ }
+
+ /**
+ * Check we get the right data back for each block
+ */
+ @Test
+ public void getBlock() throws IOException {
+ POIFSFileSystem fsA = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+ POIFSFileSystem fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB}) {
+ ByteBuffer b;
+
+ // The 0th block is the first data block
+ b = fs.getBlockAt(0);
+ assertEquals((byte)0x9e, b.get());
+ assertEquals((byte)0x75, b.get());
+ assertEquals((byte)0x97, b.get());
+ assertEquals((byte)0xf6, b.get());
+
+ // And the next block
+ b = fs.getBlockAt(1);
+ assertEquals((byte)0x86, b.get());
+ assertEquals((byte)0x09, b.get());
+ assertEquals((byte)0x22, b.get());
+ assertEquals((byte)0xfb, b.get());
+
+ // Check the final block too
+ b = fs.getBlockAt(99);
+ assertEquals((byte)0x01, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x02, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+
+ fs.close();
+ }
+
+ // Quick check on 4096 byte blocks too
+ fsA = new POIFSFileSystem(_inst.getFile("BlockSize4096.zvi"));
+ fsB = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize4096.zvi"));
+ for(POIFSFileSystem fs : new POIFSFileSystem[] {fsA,fsB}) {
+ ByteBuffer b;
+
+ // The 0th block is the first data block
+ b = fs.getBlockAt(0);
+ assertEquals((byte)0x9e, b.get());
+ assertEquals((byte)0x75, b.get());
+ assertEquals((byte)0x97, b.get());
+ assertEquals((byte)0xf6, b.get());
+
+ // And the next block
+ b = fs.getBlockAt(1);
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x03, b.get());
+ assertEquals((byte)0x00, b.get());
+
+ // The 14th block is the FAT
+ b = fs.getBlockAt(14);
+ assertEquals((byte)0x01, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x02, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+ assertEquals((byte)0x00, b.get());
+
+ fs.close();
+ }
+ }
+
+ /**
+ * Ask for free blocks where there are some already
+ * to be had from the FAT
+ */
+ @Test
+ public void getFreeBlockWithSpare() throws IOException {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("BlockSize512.zvi"));
+
+ // Our first BAT block has spares
+ assertTrue(fs.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+
+ // First free one is 100
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(100));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(101));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(102));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs.getNextBlock(103));
+
+ // Ask, will get 100
+ assertEquals(100, fs.getFreeBlock());
+
+ // Ask again, will still get 100 as not written to
+ assertEquals(100, fs.getFreeBlock());
+
+ // Allocate it, then ask again
+ fs.setNextBlock(100, POIFSConstants.END_OF_CHAIN);
+ assertEquals(101, fs.getFreeBlock());
+
+ // All done
+ fs.close();
+ }
+
+ /**
+ * Ask for free blocks where no free ones exist, and so the
+ * file needs to be extended and another BAT/XBAT added
+ */
+ @Test
+ public void getFreeBlockWithNoneSpare() throws IOException {
+ POIFSFileSystem fs1 = new POIFSFileSystem(_inst.openResourceAsStream("BlockSize512.zvi"));
+ int free;
+
+ // We have one BAT at block 99
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(99));
+ assertBATCount(fs1, 1, 0);
+
+ // We've spare ones from 100 to 127
+ for(int i=100; i<128; i++) {
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(i));
+ }
+
+ // Check our BAT knows it's free
+ assertTrue(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+
+ // Allocate all the spare ones
+ for(int i=100; i<128; i++) {
+ fs1.setNextBlock(i, POIFSConstants.END_OF_CHAIN);
+ }
+
+ // BAT is now full, but there's only the one
+ assertFalse(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+ try {
+ assertFalse(fs1.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
+ fail("Should only be one BAT");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+ assertBATCount(fs1, 1, 0);
+
+
+ // Now ask for a free one, will need to extend the file
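+ // (block 128 is taken by the new BAT itself, so 129 is the first free block handed out)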
+ assertEquals(129, fs1.getFreeBlock());
+
+ assertFalse(fs1.getBATBlockAndIndex(0).getBlock().hasFreeSectors());
+ assertTrue(fs1.getBATBlockAndIndex(128).getBlock().hasFreeSectors());
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(128));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(129));
+
+ // We now have 2 BATs, but no XBATs
+ assertBATCount(fs1, 2, 0);
+
+
+ // Fill up to hold 109 BAT blocks
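+ // (the OLE2 header can only point to 109 BAT blocks directly; beyond that an XBAT is needed)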
+ for(int i=0; i<109; i++) {
+ fs1.getFreeBlock();
+ int startOffset = i*128;
+ while( fs1.getBATBlockAndIndex(startOffset).getBlock().hasFreeSectors() ) {
+ free = fs1.getFreeBlock();
+ fs1.setNextBlock(free, POIFSConstants.END_OF_CHAIN);
+ }
+ }
+
+ assertFalse(fs1.getBATBlockAndIndex(109 * 128 - 1).getBlock().hasFreeSectors());
+ try {
+ assertFalse(fs1.getBATBlockAndIndex(109 * 128).getBlock().hasFreeSectors());
+ fail("Should only be 109 BATs");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+
+ // We now have 109 BATs, but no XBATs
+ assertBATCount(fs1, 109, 0);
+
+
+ // Ask for it to be written out, and check the header
+ HeaderBlock header = writeOutAndReadHeader(fs1);
+ assertEquals(109, header.getBATCount());
+ assertEquals(0, header.getXBATCount());
+
+
+ // Ask for another, will get our first XBAT
+ free = fs1.getFreeBlock();
+ assertTrue("Had: " + free, free > 0);
+
+ assertFalse(fs1.getBATBlockAndIndex(109 * 128 - 1).getBlock().hasFreeSectors());
+ assertTrue(fs1.getBATBlockAndIndex(110 * 128 - 1).getBlock().hasFreeSectors());
+ try {
+ assertFalse(fs1.getBATBlockAndIndex(110 * 128).getBlock().hasFreeSectors());
+ fail("Should only be 110 BATs");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+ assertBATCount(fs1, 110, 1);
+
+ header = writeOutAndReadHeader(fs1);
+ assertEquals(110, header.getBATCount());
+ assertEquals(1, header.getXBATCount());
+
+
+ // Fill the XBAT, which means filling 127 BATs
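+ // (a 512 byte XBAT block holds 127 BAT pointers, plus a pointer to the next XBAT)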
+ for(int i=109; i<109+127; i++) {
+ fs1.getFreeBlock();
+ int startOffset = i*128;
+ while( fs1.getBATBlockAndIndex(startOffset).getBlock().hasFreeSectors() ) {
+ free = fs1.getFreeBlock();
+ fs1.setNextBlock(free, POIFSConstants.END_OF_CHAIN);
+ }
+ assertBATCount(fs1, i+1, 1);
+ }
+
+ // Should now have 109+127 = 236 BATs
+ assertFalse(fs1.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
+ try {
+ assertFalse(fs1.getBATBlockAndIndex(236 * 128).getBlock().hasFreeSectors());
+ fail("Should only be 236 BATs");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+ assertBATCount(fs1, 236, 1);
+
+
+ // Ask for another, will get our 2nd XBAT
+ free = fs1.getFreeBlock();
+ assertTrue("Had: " + free, free > 0);
+
+ assertFalse(fs1.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
+ assertTrue(fs1.getBATBlockAndIndex(237 * 128 - 1).getBlock().hasFreeSectors());
+ try {
+ assertFalse(fs1.getBATBlockAndIndex(237 * 128).getBlock().hasFreeSectors());
+ fail("Should only be 237 BATs");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+
+
+ // Check the counts now
+ assertBATCount(fs1, 237, 2);
+
+ // Check the header
+ header = writeOutAndReadHeader(fs1);
+ assertNotNull(header);
+
+ // Now, write it out, and read it back in again fully
+ POIFSFileSystem fs2 = writeOutAndReadBack(fs1);
+ fs1.close();
+
+ // Check that it is seen correctly
+ assertBATCount(fs2, 237, 2);
+
+ assertFalse(fs2.getBATBlockAndIndex(236 * 128 - 1).getBlock().hasFreeSectors());
+ assertTrue(fs2.getBATBlockAndIndex(237 * 128 - 1).getBlock().hasFreeSectors());
+ try {
+ assertFalse(fs2.getBATBlockAndIndex(237 * 128).getBlock().hasFreeSectors());
+ fail("Should only be 237 BATs");
+ } catch(IndexOutOfBoundsException e) {
+ // expected here
+ }
+
+
+ // All done
+ fs2.close();
+ }
+
+ /**
+ * Test that we can correctly get the list of directory
+ * entries, and the details on the files in them
+ */
+ @Test
+ public void listEntries() throws IOException {
+ for(POIFSFileSystem fs : get512and4kFileAndInput()) {
+ DirectoryEntry root = fs.getRoot();
+ assertEquals(5, root.getEntryCount());
+
+ // Check by the names
+ Entry thumbnail = root.getEntry("Thumbnail");
+ Entry dsi = root.getEntry("\u0005DocumentSummaryInformation");
+ Entry si = root.getEntry("\u0005SummaryInformation");
+ Entry image = root.getEntry("Image");
+ Entry tags = root.getEntry("Tags");
+
+ assertFalse(thumbnail.isDirectoryEntry());
+ assertFalse(dsi.isDirectoryEntry());
+ assertFalse(si.isDirectoryEntry());
+ assertTrue(image.isDirectoryEntry());
+ assertFalse(tags.isDirectoryEntry());
+
+ // Check via the iterator
+ Iterator<Entry> it = root.getEntries();
+ assertEquals(thumbnail.getName(), it.next().getName());
+ assertEquals(dsi.getName(), it.next().getName());
+ assertEquals(si.getName(), it.next().getName());
+ assertEquals(image.getName(), it.next().getName());
+ assertEquals(tags.getName(), it.next().getName());
+
+ // Look inside another
+ DirectoryEntry imageD = (DirectoryEntry)image;
+ assertEquals(7, imageD.getEntryCount());
+
+ fs.close();
+ }
+ }
+
+ /**
+ * Tests that we can get the correct contents for
+ * a document in the filesystem
+ */
+ @Test
+ public void getDocumentEntry() throws Exception {
+ for(POIFSFileSystem fs : get512and4kFileAndInput()) {
+ DirectoryEntry root = fs.getRoot();
+ Entry si = root.getEntry("\u0005SummaryInformation");
+
+ assertTrue(si.isDocumentEntry());
+ DocumentNode doc = (DocumentNode)si;
+
+ // Check we can read it
+ assertContentsMatches(null, doc);
+
+ // Now try to build the property set
+ DocumentInputStream inp = new DocumentInputStream(doc);
+ PropertySet ps = PropertySetFactory.create(inp);
+ SummaryInformation inf = (SummaryInformation)ps;
+
+ // Check some bits in it
+ assertNull(inf.getApplicationName());
+ assertNull(inf.getAuthor());
+ assertNull(inf.getSubject());
+ assertEquals(131333, inf.getOSVersion());
+
+ // Finish with this one
+ inp.close();
+
+
+ // Try the other summary information
+ si = root.getEntry("\u0005DocumentSummaryInformation");
+ assertTrue(si.isDocumentEntry());
+ doc = (DocumentNode)si;
+ assertContentsMatches(null, doc);
+
+ inp = new DocumentInputStream(doc);
+ ps = PropertySetFactory.create(inp);
+ DocumentSummaryInformation dinf = (DocumentSummaryInformation)ps;
+ assertEquals(131333, dinf.getOSVersion());
+
+ fs.close();
+ }
+ }
+
+ /**
+ * Read a file, write it and read it again.
+ * Then, alter+add some streams, write and read
+ */
+ @Test
+ public void readWriteRead() throws Exception {
+ SummaryInformation sinf;
+ DocumentSummaryInformation dinf;
+ DirectoryEntry root, testDir;
+
+ for(POIFSFileSystem fs1 : get512and4kFileAndInput()) {
+ // Check we can find the entries we expect
+ root = fs1.getRoot();
+ assertEquals(5, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Tags"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+
+ // Write out, re-load
+ POIFSFileSystem fs2 = writeOutAndReadBack(fs1);
+ fs1.close();
+
+ // Check they're still there
+ root = fs2.getRoot();
+ assertEquals(5, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Tags"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+
+ // Check the contents of them - parse the summary block and check
+ sinf = (SummaryInformation)PropertySetFactory.create(new DocumentInputStream(
+ (DocumentEntry)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME)));
+ assertEquals(131333, sinf.getOSVersion());
+
+ dinf = (DocumentSummaryInformation)PropertySetFactory.create(new DocumentInputStream(
+ (DocumentEntry)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)));
+ assertEquals(131333, dinf.getOSVersion());
+
+
+ // Add a test mini stream
+ testDir = root.createDirectory("Testing 123");
+ testDir.createDirectory("Testing 456");
+ testDir.createDirectory("Testing 789");
+ byte[] mini = new byte[] { 42, 0, 1, 2, 3, 4, 42 };
+ testDir.createDocument("Mini", new ByteArrayInputStream(mini));
+
+
+ // Write out, re-load
+ POIFSFileSystem fs3 = writeOutAndReadBack(fs2);
+ fs2.close();
+
+ root = fs3.getRoot();
+ testDir = (DirectoryEntry)root.getEntry("Testing 123");
+ assertEquals(6, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Tags"));
+ assertThat(root.getEntryNames(), hasItem("Testing 123"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+
+ // Check old and new are there
+ sinf = (SummaryInformation)PropertySetFactory.create(new DocumentInputStream(
+ (DocumentEntry)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME)));
+ assertEquals(131333, sinf.getOSVersion());
+
+ dinf = (DocumentSummaryInformation)PropertySetFactory.create(new DocumentInputStream(
+ (DocumentEntry)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)));
+ assertEquals(131333, dinf.getOSVersion());
+
+ assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini"));
+
+
+ // Write out and read once more, just to be sure
+ POIFSFileSystem fs4 = writeOutAndReadBack(fs3);
+ fs3.close();
+
+ root = fs4.getRoot();
+ testDir = (DirectoryEntry)root.getEntry("Testing 123");
+ assertEquals(6, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Tags"));
+ assertThat(root.getEntryNames(), hasItem("Testing 123"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+ sinf = (SummaryInformation)PropertySetFactory.create(new DocumentInputStream(
+ (DocumentEntry)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME)));
+ assertEquals(131333, sinf.getOSVersion());
+
+ dinf = (DocumentSummaryInformation)PropertySetFactory.create(new DocumentInputStream(
+ (DocumentEntry)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)));
+ assertEquals(131333, dinf.getOSVersion());
+
+ assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini"));
+
+
+ // Add a full stream, delete a full stream
+ byte[] main4096 = new byte[4096];
+ main4096[0] = -10;
+ main4096[4095] = -11;
+ testDir.createDocument("Normal4096", new ByteArrayInputStream(main4096));
+
+ root.getEntry("Tags").delete();
+
+
+ // Write out, re-load
+ POIFSFileSystem fs5 = writeOutAndReadBack(fs4);
+ fs4.close();
+
+ // Check it's all there
+ root = fs5.getRoot();
+ testDir = (DirectoryEntry)root.getEntry("Testing 123");
+ assertEquals(5, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Testing 123"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+
+ // Check old and new are there
+ sinf = (SummaryInformation)PropertySetFactory.create(new DocumentInputStream(
+ (DocumentEntry)root.getEntry(SummaryInformation.DEFAULT_STREAM_NAME)));
+ assertEquals(131333, sinf.getOSVersion());
+
+ dinf = (DocumentSummaryInformation)PropertySetFactory.create(new DocumentInputStream(
+ (DocumentEntry)root.getEntry(DocumentSummaryInformation.DEFAULT_STREAM_NAME)));
+ assertEquals(131333, dinf.getOSVersion());
+
+ assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini"));
+ assertContentsMatches(main4096, (DocumentEntry)testDir.getEntry("Normal4096"));
+
+
+ // Delete a directory, and add one more
+ testDir.getEntry("Testing 456").delete();
+ testDir.createDirectory("Testing ABC");
+
+
+ // Save
+ POIFSFileSystem fs6 = writeOutAndReadBack(fs5);
+ fs5.close();
+
+ // Check
+ root = fs6.getRoot();
+ testDir = (DirectoryEntry)root.getEntry("Testing 123");
+
+ assertEquals(5, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Testing 123"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+ assertEquals(4, testDir.getEntryCount());
+ assertThat(testDir.getEntryNames(), hasItem("Mini"));
+ assertThat(testDir.getEntryNames(), hasItem("Normal4096"));
+ assertThat(testDir.getEntryNames(), hasItem("Testing 789"));
+ assertThat(testDir.getEntryNames(), hasItem("Testing ABC"));
+
+
+ // Add another mini stream
+ byte[] mini2 = new byte[] { -42, 0, -1, -2, -3, -4, -42 };
+ testDir.createDocument("Mini2", new ByteArrayInputStream(mini2));
+
+ // Save, load, check
+ POIFSFileSystem fs7 = writeOutAndReadBack(fs6);
+ fs6.close();
+
+ root = fs7.getRoot();
+ testDir = (DirectoryEntry)root.getEntry("Testing 123");
+
+ assertEquals(5, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Testing 123"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+ assertEquals(5, testDir.getEntryCount());
+ assertThat(testDir.getEntryNames(), hasItem("Mini"));
+ assertThat(testDir.getEntryNames(), hasItem("Mini2"));
+ assertThat(testDir.getEntryNames(), hasItem("Normal4096"));
+ assertThat(testDir.getEntryNames(), hasItem("Testing 789"));
+ assertThat(testDir.getEntryNames(), hasItem("Testing ABC"));
+
+ assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini"));
+ assertContentsMatches(mini2, (DocumentEntry)testDir.getEntry("Mini2"));
+ assertContentsMatches(main4096, (DocumentEntry)testDir.getEntry("Normal4096"));
+
+
+ // Delete a mini stream, add one more
+ testDir.getEntry("Mini").delete();
+
+ byte[] mini3 = new byte[] { 42, 0, 42, 0, 42, 0, 42 };
+ testDir.createDocument("Mini3", new ByteArrayInputStream(mini3));
+
+
+ // Save, load, check
+ POIFSFileSystem fs8 = writeOutAndReadBack(fs7);
+ fs7.close();
+
+ root = fs8.getRoot();
+ testDir = (DirectoryEntry)root.getEntry("Testing 123");
+
+ assertEquals(5, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Testing 123"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+ assertEquals(5, testDir.getEntryCount());
+ assertThat(testDir.getEntryNames(), hasItem("Mini2"));
+ assertThat(testDir.getEntryNames(), hasItem("Mini3"));
+ assertThat(testDir.getEntryNames(), hasItem("Normal4096"));
+ assertThat(testDir.getEntryNames(), hasItem("Testing 789"));
+ assertThat(testDir.getEntryNames(), hasItem("Testing ABC"));
+
+ assertContentsMatches(mini2, (DocumentEntry)testDir.getEntry("Mini2"));
+ assertContentsMatches(mini3, (DocumentEntry)testDir.getEntry("Mini3"));
+ assertContentsMatches(main4096, (DocumentEntry)testDir.getEntry("Normal4096"));
+
+
+ // Change some existing streams
+ POIFSDocument mini2Doc = new POIFSDocument((DocumentNode)testDir.getEntry("Mini2"));
+ mini2Doc.replaceContents(new ByteArrayInputStream(mini));
+
+ byte[] main4106 = new byte[4106];
+ main4106[0] = 41;
+ main4106[4105] = 42;
+ POIFSDocument mainDoc = new POIFSDocument((DocumentNode)testDir.getEntry("Normal4096"));
+ mainDoc.replaceContents(new ByteArrayInputStream(main4106));
+
+
+ // Re-check
+ POIFSFileSystem fs9 = writeOutAndReadBack(fs8);
+ fs8.close();
+
+ root = fs9.getRoot();
+ testDir = (DirectoryEntry)root.getEntry("Testing 123");
+
+ assertEquals(5, root.getEntryCount());
+ assertThat(root.getEntryNames(), hasItem("Thumbnail"));
+ assertThat(root.getEntryNames(), hasItem("Image"));
+ assertThat(root.getEntryNames(), hasItem("Testing 123"));
+ assertThat(root.getEntryNames(), hasItem("\u0005DocumentSummaryInformation"));
+ assertThat(root.getEntryNames(), hasItem("\u0005SummaryInformation"));
+
+ assertEquals(5, testDir.getEntryCount());
+ assertThat(testDir.getEntryNames(), hasItem("Mini2"));
+ assertThat(testDir.getEntryNames(), hasItem("Mini3"));
+ assertThat(testDir.getEntryNames(), hasItem("Normal4096"));
+ assertThat(testDir.getEntryNames(), hasItem("Testing 789"));
+ assertThat(testDir.getEntryNames(), hasItem("Testing ABC"));
+
+ assertContentsMatches(mini, (DocumentEntry)testDir.getEntry("Mini2"));
+ assertContentsMatches(mini3, (DocumentEntry)testDir.getEntry("Mini3"));
+ assertContentsMatches(main4106, (DocumentEntry)testDir.getEntry("Normal4096"));
+
+
+ // All done
+ fs9.close();
+ }
+ }
+
+ /**
+ * Create a new file, write it out and read it back again.
+ * Then add some streams, write it out and read it back again.
+ */
+ @Test
+ public void createWriteRead() throws IOException {
+ POIFSFileSystem fs1 = new POIFSFileSystem();
+ DocumentEntry miniDoc;
+ DocumentEntry normDoc;
+
+ // Initially has Properties + BAT but not SBAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(2));
+
+ // Check that the SBAT is empty
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getRoot().getProperty().getStartBlock());
+
+ // Check that properties table was given block 0
+ assertEquals(0, fs1._get_property_table().getStartBlock());
+
+ // Write and read it
+ POIFSFileSystem fs2 = writeOutAndReadBack(fs1);
+ fs1.close();
+
+ // No change, SBAT remains empty
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs2.getNextBlock(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs2.getNextBlock(2));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs2.getNextBlock(3));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getRoot().getProperty().getStartBlock());
+ assertEquals(0, fs2._get_property_table().getStartBlock());
+ fs2.close();
+
+ // Check the same but with saving to a file
+ POIFSFileSystem fs3 = new POIFSFileSystem();
+ POIFSFileSystem fs4 = writeOutFileAndReadBack(fs3);
+ fs3.close();
+
+ // Same, no change, SBAT remains empty
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs4.getNextBlock(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(2));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(3));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getRoot().getProperty().getStartBlock());
+ assertEquals(0, fs4._get_property_table().getStartBlock());
+
+
+
+ // Put everything within a new directory
+ DirectoryEntry testDir = fs4.createDirectory("Test Directory");
+
+ // Add a new Normal Stream (Normal Streams are a minimum of 4096 bytes)
+ byte[] main4096 = new byte[4096];
+ main4096[0] = -10;
+ main4096[4095] = -11;
+ testDir.createDocument("Normal4096", new ByteArrayInputStream(main4096));
+
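+ // 4096 bytes at 512 bytes per big block = 8 blocks, chained from block 2 to block 9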
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs4.getNextBlock(1));
+ assertEquals(3, fs4.getNextBlock(2));
+ assertEquals(4, fs4.getNextBlock(3));
+ assertEquals(5, fs4.getNextBlock(4));
+ assertEquals(6, fs4.getNextBlock(5));
+ assertEquals(7, fs4.getNextBlock(6));
+ assertEquals(8, fs4.getNextBlock(7));
+ assertEquals(9, fs4.getNextBlock(8));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(9));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(10));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(11));
+ // SBAT still unused
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getRoot().getProperty().getStartBlock());
+
+
+ // Add a bigger Normal Stream
+ byte[] main5124 = new byte[5124];
+ main5124[0] = -22;
+ main5124[5123] = -33;
+ testDir.createDocument("Normal5124", new ByteArrayInputStream(main5124));
+
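+ // 5124 bytes needs 11 x 512 byte blocks (10 through 20), the last one only partly used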
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs4.getNextBlock(1));
+ assertEquals(3, fs4.getNextBlock(2));
+ assertEquals(4, fs4.getNextBlock(3));
+ assertEquals(5, fs4.getNextBlock(4));
+ assertEquals(6, fs4.getNextBlock(5));
+ assertEquals(7, fs4.getNextBlock(6));
+ assertEquals(8, fs4.getNextBlock(7));
+ assertEquals(9, fs4.getNextBlock(8));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(9));
+
+ assertEquals(11, fs4.getNextBlock(10));
+ assertEquals(12, fs4.getNextBlock(11));
+ assertEquals(13, fs4.getNextBlock(12));
+ assertEquals(14, fs4.getNextBlock(13));
+ assertEquals(15, fs4.getNextBlock(14));
+ assertEquals(16, fs4.getNextBlock(15));
+ assertEquals(17, fs4.getNextBlock(16));
+ assertEquals(18, fs4.getNextBlock(17));
+ assertEquals(19, fs4.getNextBlock(18));
+ assertEquals(20, fs4.getNextBlock(19));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(20));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(21));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(22));
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getRoot().getProperty().getStartBlock());
+
+
+ // Now Add a mini stream
+ byte[] mini = new byte[] { 42, 0, 1, 2, 3, 4, 42 };
+ testDir.createDocument("Mini", new ByteArrayInputStream(mini));
+
+ // Mini stream will get one block for the FAT + one block for data
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs4.getNextBlock(1));
+ assertEquals(3, fs4.getNextBlock(2));
+ assertEquals(4, fs4.getNextBlock(3));
+ assertEquals(5, fs4.getNextBlock(4));
+ assertEquals(6, fs4.getNextBlock(5));
+ assertEquals(7, fs4.getNextBlock(6));
+ assertEquals(8, fs4.getNextBlock(7));
+ assertEquals(9, fs4.getNextBlock(8));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(9));
+
+ assertEquals(11, fs4.getNextBlock(10));
+ assertEquals(12, fs4.getNextBlock(11));
+ assertEquals(13, fs4.getNextBlock(12));
+ assertEquals(14, fs4.getNextBlock(13));
+ assertEquals(15, fs4.getNextBlock(14));
+ assertEquals(16, fs4.getNextBlock(15));
+ assertEquals(17, fs4.getNextBlock(16));
+ assertEquals(18, fs4.getNextBlock(17));
+ assertEquals(19, fs4.getNextBlock(18));
+ assertEquals(20, fs4.getNextBlock(19));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(20));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(21));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs4.getNextBlock(22));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs4.getNextBlock(23));
+
+ // Check the mini stream location was set
+ // (21 is the mini FAT, 22 is the first mini stream block)
+ assertEquals(22, fs4.getRoot().getProperty().getStartBlock());
+
+
+ // Write and read back
+ POIFSFileSystem fs5 = writeOutAndReadBack(fs4);
+ fs4.close();
+ HeaderBlock header = writeOutAndReadHeader(fs5);
+
+ // Check the header has the right pointers in it
+ assertEquals(1, header.getBATCount());
+ assertEquals(1, header.getBATArray()[0]);
+ assertEquals(0, header.getPropertyStart());
+ assertEquals(1, header.getSBATCount());
+ assertEquals(21, header.getSBATStart());
+ assertEquals(22, fs5._get_property_table().getRoot().getStartBlock());
+
+ // Block use should be almost the same, except that the properties
+ //  stream will have grown to cover 2 blocks
+ // Check that the rest of the block use is unchanged
+ assertEquals(23, fs5.getNextBlock(0)); // Properties now extends over 2 blocks
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs5.getNextBlock(1));
+
+ assertEquals(3, fs5.getNextBlock(2));
+ assertEquals(4, fs5.getNextBlock(3));
+ assertEquals(5, fs5.getNextBlock(4));
+ assertEquals(6, fs5.getNextBlock(5));
+ assertEquals(7, fs5.getNextBlock(6));
+ assertEquals(8, fs5.getNextBlock(7));
+ assertEquals(9, fs5.getNextBlock(8));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(9)); // End of normal4096
+
+ assertEquals(11, fs5.getNextBlock(10));
+ assertEquals(12, fs5.getNextBlock(11));
+ assertEquals(13, fs5.getNextBlock(12));
+ assertEquals(14, fs5.getNextBlock(13));
+ assertEquals(15, fs5.getNextBlock(14));
+ assertEquals(16, fs5.getNextBlock(15));
+ assertEquals(17, fs5.getNextBlock(16));
+ assertEquals(18, fs5.getNextBlock(17));
+ assertEquals(19, fs5.getNextBlock(18));
+ assertEquals(20, fs5.getNextBlock(19));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(20)); // End of normal5124
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(21)); // Mini Stream FAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(22)); // Mini Stream data
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs5.getNextBlock(23)); // Properties #2
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs5.getNextBlock(24));
+
+
+ // Check some data
+ assertEquals(1, fs5.getRoot().getEntryCount());
+ testDir = (DirectoryEntry)fs5.getRoot().getEntry("Test Directory");
+ assertEquals(3, testDir.getEntryCount());
+
+ miniDoc = (DocumentEntry)testDir.getEntry("Mini");
+ assertContentsMatches(mini, miniDoc);
+
+ normDoc = (DocumentEntry)testDir.getEntry("Normal4096");
+ assertContentsMatches(main4096, normDoc);
+
+ normDoc = (DocumentEntry)testDir.getEntry("Normal5124");
+ assertContentsMatches(main5124, normDoc);
+
+
+ // Delete a couple of streams
+ miniDoc.delete();
+ normDoc.delete();
+
+
+ // Check - will have unused sectors now
+ POIFSFileSystem fs6 = writeOutAndReadBack(fs5);
+ fs5.close();
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs6.getNextBlock(0)); // Props back in 1 block
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs6.getNextBlock(1));
+
+ assertEquals(3, fs6.getNextBlock(2));
+ assertEquals(4, fs6.getNextBlock(3));
+ assertEquals(5, fs6.getNextBlock(4));
+ assertEquals(6, fs6.getNextBlock(5));
+ assertEquals(7, fs6.getNextBlock(6));
+ assertEquals(8, fs6.getNextBlock(7));
+ assertEquals(9, fs6.getNextBlock(8));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs6.getNextBlock(9)); // End of normal4096
+
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(10));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(11));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(12));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(13));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(14));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(15));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(16));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(17));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(18));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(19));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(20));
+
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs6.getNextBlock(21)); // Mini Stream FAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs6.getNextBlock(22)); // Mini Stream data
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(23)); // Properties gone
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(24));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs6.getNextBlock(25));
+
+ // All done
+ fs6.close();
+ }
+
+ @Test
+ public void addBeforeWrite() throws IOException {
+ POIFSFileSystem fs1 = new POIFSFileSystem();
+ DocumentEntry miniDoc;
+ DocumentEntry normDoc;
+ HeaderBlock hdr;
+
+ // Initially has Properties + BAT but nothing else
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(1));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(2));
+
+ hdr = writeOutAndReadHeader(fs1);
+ // No mini stream, and no XBATs
+ // Will have a FAT block and a properties stream
+ assertEquals(1, hdr.getBATCount());
+ assertEquals(1, hdr.getBATArray()[0]);
+ assertEquals(0, hdr.getPropertyStart());
+ assertEquals(POIFSConstants.END_OF_CHAIN, hdr.getSBATStart());
+ assertEquals(POIFSConstants.END_OF_CHAIN, hdr.getXBATIndex());
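+ // File is the header plus 2 used blocks (properties + FAT) = 3 x 512 bytes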
+ assertEquals(POIFSConstants.SMALLER_BIG_BLOCK_SIZE*3, fs1.size());
+ fs1.close();
+
+ // Get a clean filesystem to start with
+ fs1 = new POIFSFileSystem();
+
+ // Put our test files in a non-standard place
+ DirectoryEntry parentDir = fs1.createDirectory("Parent Directory");
+ DirectoryEntry testDir = parentDir.createDirectory("Test Directory");
+
+
+ // Add to the mini stream
+ byte[] mini = new byte[] { 42, 0, 1, 2, 3, 4, 42 };
+ testDir.createDocument("Mini", new ByteArrayInputStream(mini));
+
+ // Add to the main stream
+ byte[] main4096 = new byte[4096];
+ main4096[0] = -10;
+ main4096[4095] = -11;
+ testDir.createDocument("Normal4096", new ByteArrayInputStream(main4096));
+
+
+ // Check the mini stream was added, then the main stream
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(0));
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs1.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(2)); // Mini FAT
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(3)); // Mini Stream
+ assertEquals(5, fs1.getNextBlock(4)); // Main Stream
+ assertEquals(6, fs1.getNextBlock(5));
+ assertEquals(7, fs1.getNextBlock(6));
+ assertEquals(8, fs1.getNextBlock(7));
+ assertEquals(9, fs1.getNextBlock(8));
+ assertEquals(10, fs1.getNextBlock(9));
+ assertEquals(11, fs1.getNextBlock(10));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs1.getNextBlock(11));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs1.getNextBlock(12));
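+ // File is the header plus blocks 0 to 11 = 13 x 512 bytes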
+ assertEquals(POIFSConstants.SMALLER_BIG_BLOCK_SIZE*13, fs1.size());
+
+
+ // Check that we can read the right data pre-write
+ miniDoc = (DocumentEntry)testDir.getEntry("Mini");
+ assertContentsMatches(mini, miniDoc);
+
+ normDoc = (DocumentEntry)testDir.getEntry("Normal4096");
+ assertContentsMatches(main4096, normDoc);
+
+
+ // Write, read, check
+ hdr = writeOutAndReadHeader(fs1);
+ POIFSFileSystem fs2 = writeOutAndReadBack(fs1);
+ fs1.close();
+
+ // Check the header details - will have the SBAT near the start,
+ // then the properties at the end
+ assertEquals(1, hdr.getBATCount());
+ assertEquals(1, hdr.getBATArray()[0]);
+ assertEquals(2, hdr.getSBATStart());
+ assertEquals(0, hdr.getPropertyStart());
+ assertEquals(POIFSConstants.END_OF_CHAIN, hdr.getXBATIndex());
+
+ // Check the block allocation is unchanged, other than
+ // the properties stream going in at the end
+ assertEquals(12, fs2.getNextBlock(0)); // Properties
+ assertEquals(POIFSConstants.FAT_SECTOR_BLOCK, fs2.getNextBlock(1));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(2));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(3));
+ assertEquals(5, fs2.getNextBlock(4));
+ assertEquals(6, fs2.getNextBlock(5));
+ assertEquals(7, fs2.getNextBlock(6));
+ assertEquals(8, fs2.getNextBlock(7));
+ assertEquals(9, fs2.getNextBlock(8));
+ assertEquals(10, fs2.getNextBlock(9));
+ assertEquals(11, fs2.getNextBlock(10));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(11));
+ assertEquals(POIFSConstants.END_OF_CHAIN, fs2.getNextBlock(12));
+ assertEquals(POIFSConstants.UNUSED_BLOCK, fs2.getNextBlock(13));
+ assertEquals(POIFSConstants.SMALLER_BIG_BLOCK_SIZE*14, fs2.size());
+
+
+ // Check the data
+ DirectoryEntry fsRoot = fs2.getRoot();
+ assertEquals(1, fsRoot.getEntryCount());
+
+ parentDir = (DirectoryEntry)fsRoot.getEntry("Parent Directory");
+ assertEquals(1, parentDir.getEntryCount());
+
+ testDir = (DirectoryEntry)parentDir.getEntry("Test Directory");
+ assertEquals(2, testDir.getEntryCount());
+
+ miniDoc = (DocumentEntry)testDir.getEntry("Mini");
+ assertContentsMatches(mini, miniDoc);
+
+ normDoc = (DocumentEntry)testDir.getEntry("Normal4096");
+ assertContentsMatches(main4096, normDoc);
+
+
+ // Add one more stream to each, then save and re-load
+ byte[] mini2 = new byte[] { -42, 0, -1, -2, -3, -4, -42 };
+ testDir.createDocument("Mini2", new ByteArrayInputStream(mini2));
+
+ // Add to the main stream
+ byte[] main4106 = new byte[4106];
+ main4106[0] = 41;
+ main4106[4105] = 42;
+ testDir.createDocument("Normal4106", new ByteArrayInputStream(main4106));
+
+
+ // Recheck the data in all 4 streams
+ POIFSFileSystem fs3 = writeOutAndReadBack(fs2);
+ fs2.close();
+
+ fsRoot = fs3.getRoot();
+ assertEquals(1, fsRoot.getEntryCount());
+
+ parentDir = (DirectoryEntry)fsRoot.getEntry("Parent Directory");
+ assertEquals(1, parentDir.getEntryCount());
+
+ testDir = (DirectoryEntry)parentDir.getEntry("Test Directory");
+ assertEquals(4, testDir.getEntryCount());
+
+ miniDoc = (DocumentEntry)testDir.getEntry("Mini");
+ assertContentsMatches(mini, miniDoc);
+
+ miniDoc = (DocumentEntry)testDir.getEntry("Mini2");
+ assertContentsMatches(mini2, miniDoc);
+
+ normDoc = (DocumentEntry)testDir.getEntry("Normal4106");
+ assertContentsMatches(main4106, normDoc);
+
+ // All done
+ fs3.close();
+ }
+
+ @Test
+ public void readZeroLengthEntries() throws IOException {
+ POIFSFileSystem fs = new POIFSFileSystem(_inst.getFile("only-zero-byte-streams.ole2"));
+ DirectoryNode testDir = fs.getRoot();
+ assertEquals(3, testDir.getEntryCount());
+ DocumentEntry entry;
+
+ entry = (DocumentEntry)testDir.getEntry("test-zero-1");
+ assertNotNull(entry);
+ assertEquals(0, entry.getSize());
+
+ entry = (DocumentEntry)testDir.getEntry("test-zero-2");
+ assertNotNull(entry);
+ assertEquals(0, entry.getSize());
+
+ entry = (DocumentEntry)testDir.getEntry("test-zero-3");
+ assertNotNull(entry);
+ assertEquals(0, entry.getSize());
+
+ // Check properties, all have zero length, no blocks
+ PropertyTable props = fs._get_property_table();
+ assertEquals(POIFSConstants.END_OF_CHAIN, props.getRoot().getStartBlock());
+ for (Property prop : props.getRoot()) {
+ assertEquals("test-zero-", prop.getName().substring(0, 10));
+ assertEquals(POIFSConstants.END_OF_CHAIN, prop.getStartBlock());
+ }
+
+ // All done
+ fs.close();
+ }
+
+ @Test
+ public void writeZeroLengthEntries() throws IOException {
+ POIFSFileSystem fs1 = new POIFSFileSystem();
+ DirectoryNode testDir = fs1.getRoot();
+ DocumentEntry miniDoc;
+ DocumentEntry normDoc;
+ DocumentEntry emptyDoc;
+
+ // Add mini and normal sized entries to start
+ byte[] mini2 = new byte[] { -42, 0, -1, -2, -3, -4, -42 };
+ testDir.createDocument("Mini2", new ByteArrayInputStream(mini2));
+
+ // Add to the main stream
+ byte[] main4106 = new byte[4106];
+ main4106[0] = 41;
+ main4106[4105] = 42;
+ testDir.createDocument("Normal4106", new ByteArrayInputStream(main4106));
+
+ // Now add some empty ones
+ byte[] empty = new byte[0];
+ testDir.createDocument("empty-1", new ByteArrayInputStream(empty));
+ testDir.createDocument("empty-2", new ByteArrayInputStream(empty));
+ testDir.createDocument("empty-3", new ByteArrayInputStream(empty));
+
+ // Check
+ miniDoc = (DocumentEntry)testDir.getEntry("Mini2");
+ assertContentsMatches(mini2, miniDoc);
+
+ normDoc = (DocumentEntry)testDir.getEntry("Normal4106");
+ assertContentsMatches(main4106, normDoc);
+
+ emptyDoc = (DocumentEntry)testDir.getEntry("empty-1");
+ assertContentsMatches(empty, emptyDoc);
+
+ emptyDoc = (DocumentEntry)testDir.getEntry("empty-2");
+ assertContentsMatches(empty, emptyDoc);
+
+ emptyDoc = (DocumentEntry)testDir.getEntry("empty-3");
+ assertContentsMatches(empty, emptyDoc);
+
+ // Look at the properties entry, and check the empty ones
+ // have zero size and no start block
+ PropertyTable props = fs1._get_property_table();
+ Iterator<Property> propsIt = props.getRoot().getChildren();
+
+ Property prop = propsIt.next();
+ assertEquals("Mini2", prop.getName());
+ assertEquals(0, prop.getStartBlock());
+ assertEquals(7, prop.getSize());
+
+ prop = propsIt.next();
+ assertEquals("Normal4106", prop.getName());
+ assertEquals(4, prop.getStartBlock()); // BAT, Props, SBAT, Mini
+ assertEquals(4106, prop.getSize());
+
+ prop = propsIt.next();
+ assertEquals("empty-1", prop.getName());
+ assertEquals(POIFSConstants.END_OF_CHAIN, prop.getStartBlock());
+ assertEquals(0, prop.getSize());
+
+ prop = propsIt.next();
+ assertEquals("empty-2", prop.getName());
+ assertEquals(POIFSConstants.END_OF_CHAIN, prop.getStartBlock());
+ assertEquals(0, prop.getSize());
+
+ prop = propsIt.next();
+ assertEquals("empty-3", prop.getName());
+ assertEquals(POIFSConstants.END_OF_CHAIN, prop.getStartBlock());
+ assertEquals(0, prop.getSize());
+
+
+ // Save and re-check
+ POIFSFileSystem fs2 = writeOutAndReadBack(fs1);
+ fs1.close();
+ testDir = fs2.getRoot();
+
+ miniDoc = (DocumentEntry)testDir.getEntry("Mini2");
+ assertContentsMatches(mini2, miniDoc);
+
+ normDoc = (DocumentEntry)testDir.getEntry("Normal4106");
+ assertContentsMatches(main4106, normDoc);
+
+ emptyDoc = (DocumentEntry)testDir.getEntry("empty-1");
+ assertContentsMatches(empty, emptyDoc);
+
+ emptyDoc = (DocumentEntry)testDir.getEntry("empty-2");
+ assertContentsMatches(empty, emptyDoc);
+
+ emptyDoc = (DocumentEntry)testDir.getEntry("empty-3");
+ assertContentsMatches(empty, emptyDoc);
+
+ // Check that a mini-stream was assigned, with one block used
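+ //  (the 7 byte Mini2 entry rounds up to a single 64 byte mini block)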
+ assertEquals(3, testDir.getProperty().getStartBlock());
+ assertEquals(64, testDir.getProperty().getSize());
+
+ // All done
+ fs2.close();
+ }
+
+ /**
+ * Test that we can read a file with NPOIFS, create a new NPOIFS instance,
+ * write it out, read it with POIFS, and see the original data
+ */
+ @Test
+ public void NPOIFSReadCopyWritePOIFSRead() throws IOException {
+ File testFile = POIDataSamples.getSpreadSheetInstance().getFile("Simple.xls");
+ POIFSFileSystem src = new POIFSFileSystem(testFile);
+ byte wbDataExp[] = IOUtils.toByteArray(src.createDocumentInputStream("Workbook"));
+
+ POIFSFileSystem nfs = new POIFSFileSystem();
+ EntryUtils.copyNodes(src.getRoot(), nfs.getRoot());
+ src.close();
+
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ nfs.writeFilesystem(bos);
+ nfs.close();
+
+ POIFSFileSystem pfs = new POIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()));
+ byte wbDataAct[] = IOUtils.toByteArray(pfs.createDocumentInputStream("Workbook"));
+
+ assertThat(wbDataExp, equalTo(wbDataAct));
+ pfs.close();
+ }
+
+ /**
+ * Ensure that you can recursively delete directories and their
+ * contents
+ */
+ @Test
+ public void RecursiveDelete() throws IOException {
+ File testFile = POIDataSamples.getSpreadSheetInstance().getFile("SimpleMacro.xls");
+ POIFSFileSystem src = new POIFSFileSystem(testFile);
+
+ // Starts out with 5 entries:
+ // _VBA_PROJECT_CUR
+ // SummaryInformation <(0x05)SummaryInformation>
+ // DocumentSummaryInformation <(0x05)DocumentSummaryInformation>
+ // Workbook
+ // CompObj <(0x01)CompObj>
+ assertEquals(5, _countChildren(src._get_property_table().getRoot()));
+ assertEquals(5, src.getRoot().getEntryCount());
+
+ // Grab the VBA project root
+ DirectoryEntry vbaProj = (DirectoryEntry)src.getRoot().getEntry("_VBA_PROJECT_CUR");
+ assertEquals(3, vbaProj.getEntryCount());
+ // Can't delete yet, has stuff
+ assertFalse(vbaProj.delete());
+ // Recursively delete
+ _recursiveDeletee(vbaProj);
+
+ // Entries gone
+ assertEquals(4, _countChildren(src._get_property_table().getRoot()));
+ assertEquals(4, src.getRoot().getEntryCount());
+
+ // Done
+ src.close();
+ }
+ private void _recursiveDeletee(Entry entry) throws IOException {
+ if (entry.isDocumentEntry()) {
+ assertTrue(entry.delete());
+ return;
+ }
+
+ DirectoryEntry dir = (DirectoryEntry)entry;
+ String[] names = dir.getEntryNames().toArray(new String[dir.getEntryCount()]);
+ for (String name : names) {
+ Entry ce = dir.getEntry(name);
+ _recursiveDeletee(ce);
+ }
+ assertTrue(dir.delete());
+ }
+ @SuppressWarnings("unused")
+ private int _countChildren(DirectoryProperty p) {
+ int count = 0;
+ for (Property cp : p) { count++; }
+ return count;
+ }
+
+ /**
+ * Ensures we can create a file >2gb in size, as well as
+ * extend existing files past the 2gb boundary.
+ *
+ * Note that to run this test, you will need 2.5+gb of free
+ * space on your TMP/TEMP partition/disk.
+ *
+ * You also need to be able to mmap 2.5+gb files, which may need
+ * bigger kernel.shmmax and vm.max_map_count settings on Linux.
+ *
+ * TODO Fix this to work...
+ */
+ @Test
+ @Ignore("Work in progress test for #60670")
+ public void creationAndExtensionPast2GB() throws Exception {
+ File big = TempFile.createTempFile("poi-test-", ".ole2");
+ Assume.assumeTrue("2.5gb of free space is required on your tmp/temp " +
+ "partition/disk to run large file tests",
+ big.getFreeSpace() > 2.5*1024*1024*1024);
+ System.out.println("Slow, memory heavy test in progress....");
+
+ int s100mb = 100*1024*1024;
+ int s512mb = 512*1024*1024;
+ long s2gb = 2L *1024*1024*1024;
+ DocumentEntry entry;
+ POIFSFileSystem fs;
+
+ // Create a just-sub 2gb file
+ fs = POIFSFileSystem.create(big);
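+ // 19 entries of 100mb each is roughly 1.9gb, just below the 2gb boundary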
+ for (int i=0; i<19; i++) {
+ fs.createDocument(new DummyDataInputStream(s100mb), "Entry"+i);
+ }
+ fs.writeFilesystem();
+ fs.close();
+
+ // Extend it past the 2gb mark
+ fs = new POIFSFileSystem(big, false);
+ for (int i=0; i<19; i++) {
+ entry = (DocumentEntry)fs.getRoot().getEntry("Entry"+i);
+ assertNotNull(entry);
+ assertEquals(s100mb, entry.getSize());
+ }
+
+ fs.createDocument(new DummyDataInputStream(s512mb), "Bigger");
+ fs.writeFilesystem();
+ fs.close();
+
+ // Check it still works
+ fs = new POIFSFileSystem(big, false);
+ for (int i=0; i<19; i++) {
+ entry = (DocumentEntry)fs.getRoot().getEntry("Entry"+i);
+ assertNotNull(entry);
+ assertEquals(s100mb, entry.getSize());
+ }
+ entry = (DocumentEntry)fs.getRoot().getEntry("Bigger");
+ assertNotNull(entry);
+ assertEquals(s512mb, entry.getSize());
+
+ // Tidy
+ fs.close();
+ assertTrue(big.delete());
+
+
+ // Create a >2gb file
+ fs = POIFSFileSystem.create(big);
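+ // 4 entries of 512mb each is 2gb of data, so with overhead the file passes the 2gb mark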
+ for (int i=0; i<4; i++) {
+ fs.createDocument(new DummyDataInputStream(s512mb), "Entry"+i);
+ }
+ fs.writeFilesystem();
+ fs.close();
+
+ // Read it
+ fs = new POIFSFileSystem(big, false);
+ for (int i=0; i<4; i++) {
+ entry = (DocumentEntry)fs.getRoot().getEntry("Entry"+i);
+ assertNotNull(entry);
+ assertEquals(s512mb, entry.getSize());
+ }
+
+ // Extend it
+ fs.createDocument(new DummyDataInputStream(s512mb), "Entry4");
+ fs.writeFilesystem();
+ fs.close();
+
+ // Check it worked
+ fs = new POIFSFileSystem(big, false);
+ for (int i=0; i<5; i++) {
+ entry = (DocumentEntry)fs.getRoot().getEntry("Entry"+i);
+ assertNotNull(entry);
+ assertEquals(s512mb, entry.getSize());
+ }
+
+ // Tidy
+ fs.close();
+ assertTrue(big.delete());
+
+ // Create a file with a 2gb entry
+ fs = POIFSFileSystem.create(big);
+ fs.createDocument(new DummyDataInputStream(s100mb), "Small");
+ // TODO Check we get a helpful error about the max size
+ fs.createDocument(new DummyDataInputStream(s2gb), "Big");
+ }
+
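+ // Supplies a deterministic, repeating byte pattern of the requested length,
+ // without having to hold it all in memory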
+ private static final class DummyDataInputStream extends InputStream {
+ private final long maxSize;
+ private long size;
+ private DummyDataInputStream(long maxSize) {
+ this.maxSize = maxSize;
+ this.size = 0;
+ }
+
+ public int read() {
+ if (size >= maxSize) return -1;
+ size++;
+ return (int)(size % 128);
+ }
+
+ public int read(byte[] b) {
+ return read(b, 0, b.length);
+ }
+ public int read(byte[] b, int offset, int len) {
+ if (size >= maxSize) return -1;
+ int sz = (int)Math.min(len, maxSize-size);
+ for (int i=0; i<sz; i++) {
+ b[i+offset] = (byte)((size+i) % 128);
+ }
+ size += sz;
+ return sz;
+ }
+ }
+
+ @Ignore("Takes a long time to run")
+ @Test
+ public void performance() throws Exception {
+ int iterations = 200;//1_000;
+
+ System.out.println("NPOI:");
+ long start = System.currentTimeMillis();
+
+ for (int i = 0; i < iterations; i++) {
+
+ try (InputStream inputStream = POIDataSamples.getHSMFInstance().openResourceAsStream("lots-of-recipients.msg");
+ POIFSFileSystem srcFileSystem = new POIFSFileSystem(inputStream);
+ POIFSFileSystem destFileSystem = new POIFSFileSystem()) {
+
+ copyAllEntries(srcFileSystem.getRoot(), destFileSystem.getRoot());
+
+ File file = File.createTempFile("npoi", ".dat");
+ try (OutputStream outputStream = new FileOutputStream(file)) {
+ destFileSystem.writeFilesystem(outputStream);
+ }
+
+ assertTrue(file.delete());
+ if (i % 10 == 0) System.out.print(".");
+ }
+ }
+
+ System.out.println();
+ System.out.println("NPOI took: " + (System.currentTimeMillis() - start));
+
+ System.out.println();
+ System.out.println();
+ }
+
+ private static void copyAllEntries(DirectoryEntry srcDirectory, DirectoryEntry destDirectory) throws IOException {
+ Iterator<Entry> iterator = srcDirectory.getEntries();
+
+ while (iterator.hasNext()) {
+ Entry entry = iterator.next();
+
+ if (entry.isDirectoryEntry()) {
+ DirectoryEntry childDest = destDirectory.createDirectory(entry.getName());
+ copyAllEntries((DirectoryEntry) entry, childDest);
+
+ } else {
+ DocumentEntry srcEntry = (DocumentEntry) entry;
+
+ try (InputStream inputStream = new DocumentInputStream(srcEntry)) {
+ destDirectory.createDocument(entry.getName(), inputStream);
+ }
+ }
+ }
+ }
+
+}
package org.apache.poi.poifs.macros;
import org.apache.poi.POIDataSamples;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.StringUtil;
import org.junit.Ignore;
public class TestVBAMacroReader {
private static final Map<POIDataSamples, String> expectedMacroContents;
- protected static String readVBA(POIDataSamples poiDataSamples) {
+ private static String readVBA(POIDataSamples poiDataSamples) {
File macro = poiDataSamples.getFile("SimpleMacro.vba");
final byte[] bytes;
try {
fromNPOIFS(POIDataSamples.getDiagramInstance(), "SimpleMacro.vsd");
}
- protected void fromFile(POIDataSamples dataSamples, String filename) throws IOException {
+ private void fromFile(POIDataSamples dataSamples, String filename) throws IOException {
File f = dataSamples.getFile(filename);
try (VBAMacroReader r = new VBAMacroReader(f)) {
assertMacroContents(dataSamples, r);
}
}
- protected void fromStream(POIDataSamples dataSamples, String filename) throws IOException {
+ private void fromStream(POIDataSamples dataSamples, String filename) throws IOException {
try (InputStream fis = dataSamples.openResourceAsStream(filename)) {
try (VBAMacroReader r = new VBAMacroReader(fis)) {
assertMacroContents(dataSamples, r);
}
}
- protected void fromNPOIFS(POIDataSamples dataSamples, String filename) throws IOException {
+ private void fromNPOIFS(POIDataSamples dataSamples, String filename) throws IOException {
File f = dataSamples.getFile(filename);
- try (NPOIFSFileSystem fs = new NPOIFSFileSystem(f)) {
+ try (POIFSFileSystem fs = new POIFSFileSystem(f)) {
try (VBAMacroReader r = new VBAMacroReader(fs)) {
assertMacroContents(dataSamples, r);
}
}
}
- protected void assertMacroContents(POIDataSamples samples, VBAMacroReader r) throws IOException {
+ private void assertMacroContents(POIDataSamples samples, VBAMacroReader r) throws IOException {
assertNotNull(r);
Map<String,Module> contents = r.readMacroModules();
assertNotNull(contents);
import java.io.ByteArrayInputStream;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.List;
-import org.apache.poi.poifs.storage.RawDataBlock;
import org.apache.poi.poifs.storage.RawDataUtil;
import org.junit.Test;
public void testConvertToProperties() throws IOException {
// real data from a real file!
- String[] hexData = {
- "52 00 6F 00 6F 00 74 00 20 00 45 00 6E 00 74 00 72 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "16 00 05 00 FF FF FF FF FF FF FF FF 0D 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 C0 47 A4 DE EC 65 C1 01 03 00 00 00 40 0C 00 00 00 00 00 00",
- "44 00 6F 00 63 00 75 00 6D 00 65 00 6E 00 74 00 20 00 44 00 65 00 74 00 61 00 69 00 6C 00 73 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 A0 7F 29 08 59 56 C1 01 C0 20 31 08 59 56 C1 01 00 00 00 00 00 00 00 00 00 00 00 00",
- "43 00 72 00 65 00 61 00 74 00 69 00 6F 00 6E 00 20 00 4E 00 61 00 6D 00 65 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1C 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 2C 00 00 00 00 00 00 00",
- "43 00 72 00 65 00 61 00 74 00 69 00 6F 00 6E 00 20 00 44 00 61 00 74 00 65 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1C 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 0A 00 00 00 00 00 00 00",
- "4C 00 61 00 73 00 74 00 20 00 53 00 61 00 76 00 65 00 64 00 20 00 44 00 61 00 74 00 65 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "20 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 0A 00 00 00 00 00 00 00",
- "44 00 61 00 74 00 65 00 20 00 46 00 69 00 6C 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "16 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 0A 00 00 00 00 00 00 00",
- "44 00 6F 00 63 00 75 00 6D 00 65 00 6E 00 74 00 20 00 56 00 65 00 72 00 73 00 69 00 6F 00 6E 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "42 00 61 00 63 00 63 00 68 00 75 00 73 00 20 00 44 00 65 00 61 00 6C 00 20 00 4E 00 75 00 6D 00",
- "62 00 65 00 72 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "44 00 6F 00 63 00 75 00 6D 00 65 00 6E 00 74 00 20 00 4C 00 6F 00 63 00 6B 00 65 00 64 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "20 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "44 00 6F 00 63 00 75 00 6D 00 65 00 6E 00 74 00 20 00 44 00 65 00 61 00 6C 00 20 00 54 00 79 00",
- "70 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "26 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 06 00 00 00 00 00 00 00",
- "44 00 6F 00 63 00 75 00 6D 00 65 00 6E 00 74 00 20 00 41 00 75 00 64 00 69 00 74 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1E 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 C0 20 31 08 59 56 C1 01 80 48 3A 08 59 56 C1 01 00 00 00 00 00 00 00 00 00 00 00 00",
- "55 00 73 00 65 00 72 00 20 00 41 00 75 00 64 00 69 00 74 00 20 00 54 00 72 00 61 00 69 00 6C 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 42 00 00 00 00 00 00 00",
- "43 00 6F 00 6E 00 74 00 72 00 61 00 63 00 74 00 20 00 47 00 65 00 6E 00 65 00 72 00 61 00 74 00",
- "69 00 6F 00 6E 00 20 00 49 00 6E 00 66 00 6F 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "32 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 80 48 3A 08 59 56 C1 01 80 48 3A 08 59 56 C1 01 00 00 00 00 00 00 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 01 01 FF FF FF FF FF FF FF FF 19 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 80 48 3A 08 59 56 C1 01 80 B2 52 08 59 56 C1 01 00 00 00 00 00 00 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 44 00 65 00 73 00 63 00 72 00 69 00 70 00 74 00 69 00 6F 00 6E 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 02 00 16 00 00 00 26 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0A 00 00 00 09 00 00 00 00 00 00 00",
- "53 00 61 00 6C 00 65 00 73 00 20 00 41 00 72 00 65 00 61 00 20 00 43 00 6F 00 64 00 65 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "20 00 02 01 12 00 00 00 2A 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0B 00 00 00 04 00 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 43 00 75 00 72 00 72 00 65 00 6E 00 63 00 79 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1C 00 02 01 1B 00 00 00 20 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0C 00 00 00 07 00 00 00 00 00 00 00",
- "4F 00 75 00 74 00 62 00 6F 00 75 00 6E 00 64 00 20 00 54 00 72 00 61 00 76 00 65 00 6C 00 20 00",
- "44 00 61 00 74 00 65 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2C 00 02 01 25 00 00 00 27 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0D 00 00 00 21 00 00 00 00 00 00 00",
- "4D 00 61 00 78 00 69 00 6D 00 75 00 6D 00 20 00 53 00 74 00 61 00 79 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1A 00 02 01 14 00 00 00 10 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0E 00 00 00 05 00 00 00 00 00 00 00",
- "4D 00 61 00 78 00 69 00 6D 00 75 00 6D 00 20 00 53 00 74 00 61 00 79 00 20 00 50 00 65 00 72 00",
- "69 00 6F 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 00 18 00 00 00 23 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0F 00 00 00 04 00 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "14 00 02 00 15 00 00 00 28 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 10 00 00 00 04 00 00 00 00 00 00 00",
- "53 00 75 00 62 00 20 00 44 00 65 00 61 00 6C 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "12 00 02 01 2E 00 00 00 1F 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 11 00 00 00 04 00 00 00 00 00 00 00",
- "43 00 6F 00 6D 00 6D 00 69 00 73 00 73 00 69 00 6F 00 6E 00 20 00 56 00 61 00 6C 00 75 00 65 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 12 00 00 00 04 00 00 00 00 00 00 00",
- "46 00 61 00 72 00 65 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "14 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 13 00 00 00 04 00 00 00 00 00 00 00",
- "46 00 55 00 44 00 20 00 47 00 72 00 69 00 64 00 20 00 44 00 69 00 6D 00 65 00 6E 00 73 00 69 00",
- "6F 00 6E 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 14 00 00 00 04 00 00 00 00 00 00 00",
- "46 00 55 00 44 00 20 00 47 00 72 00 69 00 64 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00",
- "74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2A 00 02 01 0F 00 00 00 1A 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "44 00 6F 00 75 00 62 00 6C 00 65 00 20 00 44 00 65 00 61 00 6C 00 69 00 6E 00 67 00 20 00 49 00",
- "6E 00 64 00 69 00 63 00 61 00 74 00 6F 00 72 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "32 00 02 01 11 00 00 00 21 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 15 00 00 00 04 00 00 00 00 00 00 00",
- "42 00 75 00 73 00 69 00 6E 00 65 00 73 00 73 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1C 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 16 00 00 00 04 00 00 00 00 00 00 00",
- "55 00 6D 00 62 00 72 00 65 00 6C 00 6C 00 61 00 20 00 4C 00 69 00 6E 00 6B 00 73 00 20 00 61 00",
- "6E 00 64 00 20 00 50 00 61 00 73 00 73 00 65 00 6E 00 67 00 65 00 72 00 73 00 00 00 00 00 00 00",
- "3C 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "41 00 67 00 65 00 6E 00 74 00 73 00 20 00 4E 00 61 00 6D 00 65 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "18 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 17 00 00 00 04 00 00 00 00 00 00 00",
- "4E 00 75 00 6D 00 62 00 65 00 72 00 20 00 6F 00 66 00 20 00 50 00 61 00 73 00 73 00 65 00 6E 00",
- "67 00 65 00 72 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2A 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 18 00 00 00 04 00 00 00 00 00 00 00",
- "41 00 4C 00 43 00 20 00 43 00 6F 00 64 00 65 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "14 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "43 00 6F 00 6E 00 73 00 6F 00 72 00 74 00 69 00 61 00 20 00 43 00 6F 00 64 00 65 00 73 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "20 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "43 00 68 00 69 00 6C 00 64 00 20 00 50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00",
- "20 00 50 00 65 00 72 00 6D 00 69 00 74 00 74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "36 00 02 01 24 00 00 00 2C 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 19 00 00 00 04 00 00 00 00 00 00 00",
- "50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00 20 00 6F 00 66 00 20 00 59 00 69 00",
- "65 00 6C 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1A 00 00 00 04 00 00 00 00 00 00 00",
- "4E 00 65 00 74 00 20 00 52 00 65 00 6D 00 69 00 74 00 20 00 50 00 65 00 72 00 6D 00 69 00 74 00",
- "74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 29 00 00 00 22 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1B 00 00 00 04 00 00 00 00 00 00 00",
- "49 00 6E 00 66 00 61 00 6E 00 74 00 20 00 44 00 69 00 73 00 63 00 6F 00 75 00 6E 00 74 00 20 00",
- "50 00 65 00 72 00 6D 00 69 00 74 00 74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "34 00 02 01 2D 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1C 00 00 00 04 00 00 00 00 00 00 00",
- "49 00 6E 00 66 00 61 00 6E 00 74 00 20 00 44 00 69 00 73 00 63 00 6F 00 75 00 6E 00 74 00 20 00",
- "56 00 61 00 6C 00 75 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2C 00 02 01 1E 00 00 00 2F 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1D 00 00 00 04 00 00 00 00 00 00 00",
- "54 00 52 00 56 00 41 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 02 01 30 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1E 00 00 00 04 00 00 00 00 00 00 00",
- "42 00 75 00 73 00 69 00 6E 00 65 00 73 00 73 00 20 00 4A 00 75 00 73 00 74 00 69 00 66 00 69 00",
- "63 00 61 00 74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2E 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1F 00 00 00 04 00 00 00 00 00 00 00",
- "53 00 75 00 72 00 63 00 68 00 61 00 72 00 67 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "14 00 02 01 17 00 00 00 1D 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 20 00 00 00 04 00 00 00 00 00 00 00",
- "4E 00 61 00 74 00 75 00 72 00 65 00 20 00 6F 00 66 00 20 00 56 00 61 00 72 00 69 00 61 00 74 00",
- "69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 21 00 00 00 50 00 00 00 00 00 00 00",
- "4F 00 74 00 68 00 65 00 72 00 20 00 52 00 65 00 66 00 75 00 6E 00 64 00 20 00 54 00 65 00 78 00",
- "74 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "24 00 02 01 0E 00 00 00 13 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 23 00 00 00 04 00 00 00 00 00 00 00",
- "43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
- "20 00 50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00 00 00 00 00 00 00 00 00 00 00",
- "38 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 24 00 00 00 04 00 00 00 00 00 00 00",
- "43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
- "20 00 46 00 69 00 78 00 65 00 64 00 20 00 56 00 61 00 6C 00 75 00 65 00 00 00 00 00 00 00 00 00",
- "3A 00 02 01 2B 00 00 00 1C 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 25 00 00 00 04 00 00 00 00 00 00 00",
- "43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
- "20 00 43 00 75 00 72 00 72 00 65 00 6E 00 63 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "34 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 26 00 00 00 07 00 00 00 00 00 00 00",
- "52 00 65 00 6D 00 61 00 72 00 6B 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "10 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "4F 00 74 00 68 00 65 00 72 00 20 00 43 00 61 00 72 00 72 00 69 00 65 00 72 00 20 00 53 00 65 00",
- "63 00 74 00 6F 00 72 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2C 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 27 00 00 00 04 00 00 00 00 00 00 00",
- "50 00 72 00 6F 00 72 00 61 00 74 00 65 00 20 00 43 00 6F 00 6D 00 6D 00 65 00 6E 00 74 00 73 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 02 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "4E 00 65 00 67 00 6F 00 74 00 69 00 61 00 74 00 69 00 6F 00 6E 00 20 00 49 00 6E 00 66 00 6F 00",
- "72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "30 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 80 B2 52 08 59 56 C1 01 80 B2 52 08 59 56 C1 01 00 00 00 00 00 00 00 00 00 00 00 00",
- "52 00 65 00 73 00 74 00 72 00 69 00 63 00 74 00 65 00 64 00 20 00 43 00 61 00 72 00 72 00 69 00",
- "65 00 72 00 73 00 20 00 26 00 20 00 53 00 74 00 6E 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "36 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "41 00 64 00 64 00 69 00 74 00 69 00 6F 00 6E 00 61 00 6C 00 20 00 43 00 6F 00 6D 00 6D 00 65 00",
- "6E 00 74 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "52 00 65 00 76 00 65 00 6E 00 75 00 65 00 20 00 4D 00 61 00 6E 00 61 00 67 00 65 00 6D 00 65 00",
- "6E 00 74 00 20 00 43 00 6F 00 6D 00 6D 00 65 00 6E 00 74 00 73 00 00 00 00 00 00 00 00 00 00 00",
- "38 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "52 00 65 00 76 00 65 00 6E 00 75 00 65 00 20 00 4D 00 61 00 6E 00 61 00 67 00 65 00 6D 00 65 00",
- "6E 00 74 00 20 00 52 00 65 00 66 00 65 00 72 00 65 00 6E 00 63 00 65 00 00 00 00 00 00 00 00 00",
- "3A 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "4D 00 69 00 6E 00 69 00 6D 00 75 00 6D 00 20 00 53 00 74 00 61 00 79 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1A 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "43 00 72 00 65 00 61 00 74 00 65 00 64 00 20 00 42 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "16 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 1B 00 00 00 00 00 00 00",
- "4F 00 6E 00 20 00 42 00 65 00 68 00 61 00 6C 00 66 00 20 00 4F 00 66 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1A 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 04 00 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 50 00 61 00 67 00 65 00 20 00 41 00 75 00 74 00 68 00 20 00 4C 00",
- "6F 00 63 00 6B 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2A 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 10 00 00 00 00 00 00 00",
- "47 00 72 00 6F 00 75 00 70 00 20 00 41 00 75 00 74 00 68 00 6F 00 72 00 69 00 73 00 61 00 74 00",
- "69 00 6F 00 6E 00 20 00 49 00 6E 00 66 00 6F 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "32 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 80 B2 52 08 59 56 C1 01 40 DA 5B 08 59 56 C1 01 00 00 00 00 00 00 00 00 00 00 00 00",
- "4C 00 61 00 73 00 74 00 20 00 49 00 73 00 73 00 75 00 65 00 64 00 20 00 47 00 72 00 6F 00 75 00",
- "70 00 20 00 4E 00 75 00 6D 00 62 00 65 00 72 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "32 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 06 00 00 00 00 00 00 00",
- "47 00 72 00 6F 00 75 00 70 00 20 00 4E 00 75 00 6D 00 62 00 65 00 72 00 73 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1C 00 00 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- };
-
- byte[] testdata = RawDataUtil.decode(hexData);
- ByteArrayInputStream stream = new ByteArrayInputStream(testdata);
- RawDataBlock[] raw_data = new RawDataBlock[testdata.length / 512];
+ String hexData =
+ "H4sIAAAAAAAAANWZ624TRxTHxymXcm2ahjRNUrJAoCGESxFCCPEB22lSqoRECbGE+slxNvEqcYx2bUQ/lUfphz5BnwBegw88" +
+ "Ac8A/M/ZWe/E3svMbKjEseIdbyz/zpk5cy6z66KNV0c44jdxgKsv/hImMiaOi09SzuV8983Sv+8/uG9L32D8+Gx4bwH0huiK" +
+ "lnCZ7+COi2tdeGJfBLn8y0KUPimS9J1//r7+7fPa29Ib51e+qv+rwmIXtA54bWjgiKf4RNroyZQGv18+4nvzqfwF/vSl+afl" +
+ "eBm0gOd9A6OX4G4b6eAU5EckRyzyihPfRMYK8/v9r4aRjzkJ1yNPdPwviX9Mjiuwv4FXEzoE0vvrmAfyQ9Jqi7VJl9mC/EH7" +
+ "l/nOnuZKOEfOj2fgGWLRixwvvGbJP5HKL+PTNla/o/NT4qIGP4o7r39/OBB/NrHqtMIqlyz3ZQTME1v/q8hxlb28w7wGs5d4" +
+ "Jly+E0elJ3jfwbhf7mrwI7uT7I9XOyL4WIuYnG9/qcf/KeU7Pf5/6xl8GgWYAx/kFwb8IYpB5IdCd/4p9pyS4w2mu7z3yzIX" +
+ "OLwq25rxd6g0guucAf8M/uL9F9lfhf/5rMEBZkG3CpgCf5L10OdT6j8px6ugdhDl2rgecO4JfZ8y0b6SidIqgXnwr+L6iwGf" +
+ "6pRLcryC33+FtW5xDKAsSLWHfg00Af4orsMG/PP4O57Dd8Qa70GPPSFdZuF/47heMeB/J5LWXyfaDsoo+BdYD33+sMLfgN1b" +
+ "StQ3lRHM/y1cpw343yt82mktvDx4WNCLdjXWpasxG9j/xvF3ROEvguRz/WM//6b8Hw7xNzH3FPXJ18Laz5PZMJqPrCp81sL+" +
+ "0Uy+WR6YA5/8eULor/9H5XsLHHm2OAbHXuiBuCt1oZzcYE3aCZXYXfDJny4Z8C8o9le47vM44wacBcz8YMpi/ccU/ibXmD5H" +
+ "233OPcuszR7rUpcxeY27hIC9YlfWx6E8suCr81/m36MKJDDuvUjGLfg/KvarVbaDFd7JtHZQ5iz44wq/jPmuKhk/v+M9LDb7" +
+ "X53/qtzh5Nu01+qGujiF+U2uc7d7Ga8h/aHOcx/dbXFl3BnoSu5j/80IqgP09x/VidH8JzNDP3gOpsu6pcushf0TQvU/l6vu" +
+ "dVxbsvrPtniAX7ouuA/Qtn9S4YfRtt7rvTyugcNqTEeXe+DflGxd/pQBPy8TU/2HHkzcNrD/Z4X/DDNfwy607z+GSneEmf0X" +
+ "RVb8/4PvEH+nl3nSdbllkX+nxeH6y+fzB6pDdm3qjxLFU5pTXb4jVP8n+7qyBgr3XY118bRWwWb/Ua5ek+NVMJoy+tMe3FH6" +
+ "EBeVed4pwAzsp3qeaipdPtXqcf1Z534ryr9xx72Ie25KVIzlgYX9M0Z8Opd7Jc8FB3fjQ9h/Q4R7Wpd/1Yif3Zfes7CfevWo" +
+ "/wzjLvnbnnHuJRkumP9U/6uyHj5nHZ97QZfPZNoZFci8BZ965Tj/+fz70Sls1A9FNVmeXC5oP+W/XX4C4Ymk86a8aHxH5/xJ" +
+ "nvsknf+sc9zt8Kw3ZIbrXwmKytdkb97fDd0veP5ZBi889QstjM5idFeh6Pkv2f+SOV1e/xXej2GUic9E0/V58L/ww8js9qKA" +
+ "Gn+K8Vc49xY5/ynGj5//hJ5XMX7+ZseflONV3m0V0Jvse5R/V/GuK0Xtj8+f1nrVd5nPBJvKs4is/suOPyzHSxz/uui4Y26b" +
+ "d35wdOffMu48fvfnQPyJn7894fqvK/1A1SvrSZAOP8n+6PlHGkc3F9o+f9T8eS0x5R+1fM38zxmfK1AAIAAA";
+
+ final byte[] testdata = RawDataUtil.decompress(hexData);
+ final ByteArrayInputStream stream = new ByteArrayInputStream(testdata);
+ final List<Property> properties = new ArrayList<>();
+
+ final byte[] buf = new byte[512];
+ for (int readBytes; (readBytes = stream.read(buf)) != -1; ) {
+ byte[] bbuf = buf;
+ if (readBytes < 512) {
+ bbuf = new byte[readBytes];
+ System.arraycopy(buf, 0, bbuf, 0, readBytes);
+ }
- for (int j = 0; j < raw_data.length; j++) {
- raw_data[j] = new RawDataBlock(stream);
+ PropertyFactory.convertToProperties(bbuf, properties);
}
- List<Property> properties = PropertyFactory.convertToProperties(raw_data);
assertEquals(64, properties.size());
String[] names = {
package org.apache.poi.poifs.property;
+import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
import org.apache.poi.hpsf.DocumentSummaryInformation;
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.storage.BlockAllocationTableReader;
+import org.apache.poi.poifs.filesystem.POIFSStream;
import org.apache.poi.poifs.storage.HeaderBlock;
-import org.apache.poi.poifs.storage.RawDataBlockList;
import org.apache.poi.poifs.storage.RawDataUtil;
import org.junit.Test;
*/
public final class TestPropertyTable {
- private static void confirmBlockEncoding(String[] expectedDataHexDumpLines, PropertyTable table) {
- byte[] expectedData = RawDataUtil.decode(expectedDataHexDumpLines);
- ByteArrayOutputStream stream = new ByteArrayOutputStream();
+ private static class MyPOIFSStream extends POIFSStream {
+ final ByteArrayOutputStream bos = new ByteArrayOutputStream();
+
+ MyPOIFSStream() {
+ super(null);
+ }
+
+ public void write(byte[] b, int off, int len) {
+ bos.write(b, off, len);
+ }
+
+ @Override
+ public OutputStream getOutputStream() {
+ return bos;
+ }
+ }
+
+ private static void confirmBlockEncoding(String expectedDataStr, PropertyTable table) throws IOException {
+
+ byte[] expectedData = RawDataUtil.decompress(expectedDataStr);
+ MyPOIFSStream stream = new MyPOIFSStream();
try {
- table.writeBlocks(stream);
+ table.write(stream);
} catch (IOException e) {
throw new RuntimeException(e);
}
- byte[] output = stream.toByteArray();
+ byte[] output = stream.bos.toByteArray();
- assertEquals("length check #1", expectedData.length, output.length);
- for (int j = 0; j < expectedData.length; j++) {
- assertEquals("content check #1: mismatch at offset " + j, expectedData[j], output[j]);
- }
+ assertArrayEquals(expectedData, output);
}
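
The expected block images in these tests are now carried as Base64-encoded, GZIP-compressed strings and expanded with RawDataUtil.decompress (the recurring "H4sI" prefix is the Base64 form of the GZIP magic bytes). As a minimal sketch, assuming decompress does nothing more than Base64-decode and GZIP-inflate the string, an equivalent standalone decoder could look like the following; CompressedTestData is a hypothetical helper, not part of POI:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.Base64;
    import java.util.zip.GZIPInputStream;

    final class CompressedTestData {
        // Assumed equivalent of RawDataUtil.decompress(String): Base64-decode,
        // then GZIP-inflate the result into a byte array.
        static byte[] decompress(String data) throws IOException {
            byte[] compressed = Base64.getDecoder().decode(data);
            try (GZIPInputStream gzis = new GZIPInputStream(new ByteArrayInputStream(compressed))) {
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                byte[] buf = new byte[4096];
                for (int n; (n = gzis.read(buf)) != -1; ) {
                    bos.write(buf, 0, n);
                }
                return bos.toByteArray();
            }
        }
    }
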
/**
root.addChild(summary2);
table.preWrite();
- String[] testblock = {
- "52 00 6F 00 6F 00 74 00 20 00 45 00 6E 00 74 00 72 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "16 00 05 01 FF FF FF FF FF FF FF FF 02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "57 00 6F 00 72 00 6B 00 62 00 6F 00 6F 00 6B 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "12 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 77 67 04 00 00 00 00 00",
- "05 00 53 00 75 00 6D 00 6D 00 61 00 72 00 79 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00",
- "69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 01 00 00 00 03 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 34 02 00 00 00 10 00 00 00 00 00 00",
- "05 00 44 00 6F 00 63 00 75 00 6D 00 65 00 6E 00 74 00 53 00 75 00 6D 00 6D 00 61 00 72 00 79 00",
- "49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00",
- "38 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3C 02 00 00 00 10 00 00 00 00 00 00",
- };
+ final String testblock =
+ "H4sIAAAAAAAAAAtiyAfCEgYFBleGPCBdxFDJQAoQY2Bl/A8FTETq+QdUC2OHA20vYshmSAK7I5sku0FAiIEJbv9/JHMJgfJ0FjDN" +
+ "yhDMUMqQC4SJYL97AkMhDewmkEgJQyaQnYfHHA2g/YxAmhmIibXfBBRQAgxQ+12ANiSD3ZAKjgHS3GNBhv9tkOwHAFGXmbcAAgAA";
confirmBlockEncoding(testblock, table);
table.removeProperty(summary1);
root.deleteChild(summary1);
table.preWrite();
- String[] testblock2 = {
- "52 00 6F 00 6F 00 74 00 20 00 45 00 6E 00 74 00 72 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "16 00 05 01 FF FF FF FF FF FF FF FF 02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "57 00 6F 00 72 00 6B 00 62 00 6F 00 6F 00 6B 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "12 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 77 67 04 00 00 00 00 00",
- "05 00 44 00 6F 00 63 00 75 00 6D 00 65 00 6E 00 74 00 53 00 75 00 6D 00 6D 00 61 00 72 00 79 00",
- "49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00",
- "38 00 02 01 01 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3C 02 00 00 00 10 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- };
+ final String testblock2 =
+ "H4sIAAAAAAAAAAtiyAfCEgYFBleGPCBdxFDJQAoQY2Bl/A8FTETq+QdUC2OHA20vYshmSAK7I5sku0FAiIEJbv9/JHMJ" +
+ "gfJ0FjDNyuACtDeZoZQhlyEVHALBYHYuQyI4LDyBYmlgN4JEShgygew8JHMsgPYzAmlS7LcBBZQAhA0Ae5Y5UIABAAA=";
+ // (N)POIFS only returns 384 bytes here, instead of 512
confirmBlockEncoding(testblock2, table);
table.addProperty(summary1);
root.addChild(summary1);
table.preWrite();
- String[] testblock3 = {
- "52 00 6F 00 6F 00 74 00 20 00 45 00 6E 00 74 00 72 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "16 00 05 01 FF FF FF FF FF FF FF FF 03 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 00 00 00",
- "57 00 6F 00 72 00 6B 00 62 00 6F 00 6F 00 6B 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "12 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 77 67 04 00 00 00 00 00",
- "05 00 44 00 6F 00 63 00 75 00 6D 00 65 00 6E 00 74 00 53 00 75 00 6D 00 6D 00 61 00 72 00 79 00",
- "49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00",
- "38 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3C 02 00 00 00 10 00 00 00 00 00 00",
- "05 00 53 00 75 00 6D 00 6D 00 61 00 72 00 79 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00",
- "69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 01 00 00 00 02 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 34 02 00 00 00 10 00 00 00 00 00 00",
- };
+ final String testblock3 =
+ "H4sIAAAAAAAAAAtiyAfCEgYFBleGPCBdxFDJQAoQY2Bl/A8FzETq+QdUC2OHA20vYshmSAK7I5sku0FAiIEJbv9/JHMJgfJ0FjDNyu" +
+ "ACtDeZoZQhlyEVHALBYHYuQyI4LDyBYmlgN4JEShgygew8JHMsyLDfhglICDBA7SfNPnSgAbSfEUiDjCTWfhMk+wEk2TJjAAIAAA==";
confirmBlockEncoding(testblock3, table);
}
+ @Test
public void testReadingConstructor() throws IOException {
// first, we need the raw data blocks
- String[] raw_data_array = {
- "52 00 6F 00 6F 00 74 00 20 00 45 00 6E 00 74 00 72 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "16 00 05 01 FF FF FF FF FF FF FF FF 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0A 00 00 00 80 07 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 01 01 FF FF FF FF FF FF FF FF 15 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "46 00 55 00 44 00 20 00 47 00 72 00 69 00 64 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00",
- "74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2A 00 02 01 FF FF FF FF 0E 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "44 00 6F 00 75 00 62 00 6C 00 65 00 20 00 44 00 65 00 61 00 6C 00 69 00 6E 00 67 00 20 00 49 00",
- "6E 00 64 00 69 00 63 00 61 00 74 00 6F 00 72 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "32 00 02 01 FF FF FF FF 09 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00",
- "43 00 68 00 69 00 6C 00 64 00 20 00 50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00",
- "20 00 50 00 65 00 72 00 6D 00 69 00 74 00 74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "36 00 02 01 FF FF FF FF 07 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 01 00 00 00 04 00 00 00 00 00 00 00",
- "43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
- "20 00 46 00 69 00 78 00 65 00 64 00 20 00 56 00 61 00 6C 00 75 00 65 00 00 00 00 00 00 00 00 00",
- "3A 00 02 01 FF FF FF FF 06 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 02 00 00 00 04 00 00 00 00 00 00 00",
- "55 00 6D 00 62 00 72 00 65 00 6C 00 6C 00 61 00 20 00 4C 00 69 00 6E 00 6B 00 73 00 20 00 61 00",
- "6E 00 64 00 20 00 50 00 61 00 73 00 73 00 65 00 6E 00 67 00 65 00 72 00 73 00 00 00 00 00 00 00",
- "3C 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
- "20 00 50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00 00 00 00 00 00 00 00 00 00 00",
- "38 00 02 01 FF FF FF FF 05 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 03 00 00 00 04 00 00 00 00 00 00 00",
- "49 00 6E 00 66 00 61 00 6E 00 74 00 20 00 44 00 69 00 73 00 63 00 6F 00 75 00 6E 00 74 00 20 00",
- "50 00 65 00 72 00 6D 00 69 00 74 00 74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "34 00 02 01 FF FF FF FF 04 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 04 00 00 00 04 00 00 00 00 00 00 00",
- "43 00 61 00 6E 00 63 00 65 00 6C 00 6C 00 61 00 74 00 69 00 6F 00 6E 00 20 00 46 00 65 00 65 00",
- "20 00 43 00 75 00 72 00 72 00 65 00 6E 00 63 00 79 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "34 00 02 01 FF FF FF FF 08 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 05 00 00 00 07 00 00 00 00 00 00 00",
- "4F 00 75 00 74 00 62 00 6F 00 75 00 6E 00 64 00 20 00 54 00 72 00 61 00 76 00 65 00 6C 00 20 00",
- "44 00 61 00 74 00 65 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2C 00 02 01 FF FF FF FF 0B 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 06 00 00 00 21 00 00 00 00 00 00 00",
- "42 00 75 00 73 00 69 00 6E 00 65 00 73 00 73 00 20 00 4A 00 75 00 73 00 74 00 69 00 66 00 69 00",
- "63 00 61 00 74 00 69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2E 00 02 01 FF FF FF FF 03 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 07 00 00 00 04 00 00 00 00 00 00 00",
- "49 00 6E 00 66 00 61 00 6E 00 74 00 20 00 44 00 69 00 73 00 63 00 6F 00 75 00 6E 00 74 00 20 00",
- "56 00 61 00 6C 00 75 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2C 00 02 01 FF FF FF FF 0D 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 08 00 00 00 04 00 00 00 00 00 00 00",
- "4F 00 74 00 68 00 65 00 72 00 20 00 43 00 61 00 72 00 72 00 69 00 65 00 72 00 20 00 53 00 65 00",
- "63 00 74 00 6F 00 72 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2C 00 02 01 FF FF FF FF 0A 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 09 00 00 00 04 00 00 00 00 00 00 00",
- "4E 00 75 00 6D 00 62 00 65 00 72 00 20 00 6F 00 66 00 20 00 50 00 61 00 73 00 73 00 65 00 6E 00",
- "67 00 65 00 72 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "2A 00 02 01 FF FF FF FF 0C 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0A 00 00 00 04 00 00 00 00 00 00 00",
- "53 00 61 00 6C 00 65 00 73 00 20 00 41 00 72 00 65 00 61 00 20 00 43 00 6F 00 64 00 65 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "20 00 02 01 1C 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0B 00 00 00 04 00 00 00 00 00 00 00",
- "4F 00 74 00 68 00 65 00 72 00 20 00 52 00 65 00 66 00 75 00 6E 00 64 00 20 00 54 00 65 00 78 00",
- "74 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "24 00 02 01 17 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0C 00 00 00 04 00 00 00 00 00 00 00",
- "4D 00 61 00 78 00 69 00 6D 00 75 00 6D 00 20 00 53 00 74 00 61 00 79 00 20 00 50 00 65 00 72 00",
- "69 00 6F 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 FF FF FF FF 14 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0D 00 00 00 04 00 00 00 00 00 00 00",
- "4E 00 65 00 74 00 20 00 52 00 65 00 6D 00 69 00 74 00 20 00 50 00 65 00 72 00 6D 00 69 00 74 00",
- "74 00 65 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 FF FF FF FF 13 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0E 00 00 00 04 00 00 00 00 00 00 00",
- "50 00 65 00 72 00 63 00 65 00 6E 00 74 00 61 00 67 00 65 00 20 00 6F 00 66 00 20 00 59 00 69 00",
- "65 00 6C 00 64 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 FF FF FF FF 02 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 0F 00 00 00 04 00 00 00 00 00 00 00",
- "4E 00 61 00 74 00 75 00 72 00 65 00 20 00 6F 00 66 00 20 00 56 00 61 00 72 00 69 00 61 00 74 00",
- "69 00 6F 00 6E 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 FF FF FF FF 12 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 10 00 00 00 50 00 00 00 00 00 00 00",
- "46 00 55 00 44 00 20 00 47 00 72 00 69 00 64 00 20 00 44 00 69 00 6D 00 65 00 6E 00 73 00 69 00",
- "6F 00 6E 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "28 00 02 01 10 00 00 00 11 00 00 00 FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 12 00 00 00 04 00 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 44 00 65 00 73 00 63 00 72 00 69 00 70 00 74 00 69 00 6F 00 6E 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 02 01 19 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 13 00 00 00 09 00 00 00 00 00 00 00",
- "54 00 52 00 56 00 41 00 20 00 49 00 6E 00 66 00 6F 00 72 00 6D 00 61 00 74 00 69 00 6F 00 6E 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 02 01 18 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 14 00 00 00 04 00 00 00 00 00 00 00",
- "50 00 72 00 6F 00 72 00 61 00 74 00 65 00 20 00 43 00 6F 00 6D 00 6D 00 65 00 6E 00 74 00 73 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 02 01 16 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "43 00 6F 00 6D 00 6D 00 69 00 73 00 73 00 69 00 6F 00 6E 00 20 00 56 00 61 00 6C 00 75 00 65 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "22 00 02 01 0F 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 15 00 00 00 04 00 00 00 00 00 00 00",
- "4D 00 61 00 78 00 69 00 6D 00 75 00 6D 00 20 00 53 00 74 00 61 00 79 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1A 00 02 01 20 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 16 00 00 00 05 00 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 43 00 75 00 72 00 72 00 65 00 6E 00 63 00 79 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1C 00 02 01 1D 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 17 00 00 00 07 00 00 00 00 00 00 00",
- "43 00 6F 00 6E 00 73 00 6F 00 72 00 74 00 69 00 61 00 20 00 43 00 6F 00 64 00 65 00 73 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "20 00 02 01 1B 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "42 00 75 00 73 00 69 00 6E 00 65 00 73 00 73 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "1C 00 02 01 1A 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 18 00 00 00 04 00 00 00 00 00 00 00",
- "44 00 65 00 61 00 6C 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "14 00 02 01 23 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 19 00 00 00 04 00 00 00 00 00 00 00",
- "53 00 75 00 72 00 63 00 68 00 61 00 72 00 67 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "14 00 02 01 21 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1A 00 00 00 04 00 00 00 00 00 00 00",
- "41 00 67 00 65 00 6E 00 74 00 73 00 20 00 4E 00 61 00 6D 00 65 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "18 00 02 01 1F 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1B 00 00 00 04 00 00 00 00 00 00 00",
- "46 00 61 00 72 00 65 00 20 00 54 00 79 00 70 00 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "14 00 02 01 1E 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1C 00 00 00 04 00 00 00 00 00 00 00",
- "53 00 75 00 62 00 20 00 44 00 65 00 61 00 6C 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "12 00 02 01 24 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 1D 00 00 00 04 00 00 00 00 00 00 00",
- "41 00 4C 00 43 00 20 00 43 00 6F 00 64 00 65 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "14 00 02 01 22 00 00 00 FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "52 00 65 00 6D 00 61 00 72 00 6B 00 73 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "10 00 02 01 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 03 00 47 42 50 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 1D 00 28 41 29 31 36 2D 4F 63 74 2D 32 30 30 31 20 74 6F 20 31 36 2D 4F 63 74 2D 32 30 30",
- "31 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 01 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00",
- "02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00",
- "02 00 00 00 08 00 00 00 02 00 00 00 08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 18 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 05 00 6A 61 6D 65 73 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 01 00 31 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 03 00 47 42 50 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "08 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "02 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
- "FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
- "11 00 00 00 FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF",
- "FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FE FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "01 00 00 00 02 00 00 00 03 00 00 00 04 00 00 00 05 00 00 00 06 00 00 00 07 00 00 00 08 00 00 00",
- "09 00 00 00 FE FF FF FF 0B 00 00 00 0C 00 00 00 0D 00 00 00 FE FF FF FF FE FF FF FF FE FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- };
-
- RawDataBlockList data_blocks = new RawDataBlockList(new ByteArrayInputStream(RawDataUtil
- .decode(raw_data_array)), POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- int[] bat_array = { 15 };
-
- // need to initialize the block list with a block allocation
- // table
- new BlockAllocationTableReader(
- POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, 1, bat_array, 0, -2, data_blocks);
-
- // Fake up a header
- HeaderBlock header_block = new HeaderBlock(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- header_block.setPropertyStart(0);
+ String raw_data_array =
+ "H4sIAAAAAAAAAO2Z608TQRDA5wqVlreIyFMP5AMaMUAMMcYvQIVoUAivxI9HOeCkpaS9GvjmX67OzO61S1930zOpJJ2muUd" +
+ "u9zfv3V4PoIAfH2z4BNd4LMIdSGQCktYfLVbEMf34/dWnzjPgggM55H9G/jnqUoQ83vHBw/Pr0LkWwKrwn4o0V7INx6iDDT" +
+ "vI9eBMrMdrSDB/GM/pKOVncPYynKIHXGQH3vCQeKF1OcOrLGtCOtXKmuanhfxefdyCS5w/x5bvI72ILJczwUEN3MrdPD7l4" +
+ "8fFJ01Z1/w+Ad+6x3eQRswcfqr+tjEyLvO38c4tc204Ye+U8SqQD5r/SMBPGPxjtOwU7Qv4Nuyy96+ghOcO+5984OB1iT1z" +
+ "wf4o6fEfNT+QKHxTwu1vFJWqvNf8pMD+HsN+le0Oz03556FlWc5Jdad19AHeaX6vgN8LkvhvoS5FjhA9V9udAn5KwCdf6fY" +
+ "Dezi7jxmgLKZYHyHLgZ+sEXnEYbtLTeZ6o/kDAj7l6rw+30RuiTPO5Qyz4QvfIT+cVyq/eQ96q/k9Aj7ZHjX+9RXX2P4hAT" +
+ "9l8PeQcsk5ZnMuFLkPq+tDPGZ13wvzf7+Anzb439A26gCKWEBftKr2egn6/6CA32/wD9m/Lkd+g7PcYU8UMBeb+dwUG/mzm" +
+ "h2VPwCN/X+Ax3OjDlzsu37IXIvIfybkDxr8r2jvLUY8z3GgmFOPu6t0Ho890VyWtP/HIbr/h8CMv8t5TrarLhfe8xrxnwj4" +
+ "wwa/8Zqr8vA7V0IuEj8h4I+AaT/1lzJnXsA94Tr0Iu3CAv6YgD/Kdiup339lOBvIHyVNb159ik/zPRbwSdfA/ur+M8NVmGU" +
+ "9bgT7z4Q1BbL8p1xJ6/MjzLwTrPz2978Ja1LIp1qp5l+RmWqVU50nr/3vt/R8lT8h5JsS8DzuuMHaH7bq3OePCPn0OyGs/0" +
+ "SVaeTbQj75K6nPq/nXep/TTGaRPyfkU78O9j9busIoB3yu+erqEx59tf7MCPmm1O9/jtD2m0hrHwnZPy3kU73U17+MG8g48" +
+ "l8K+VNgrv9l7v+X3HMv2uLPC/nTBn+DmarWbV4N8iIdJpH/QsifMfjbbLcby//PhfxZuO//U+OXt1TGkL8o5M+B6f9drDdZ" +
+ "zZlC9i8I+aaofQ/F4ErMJhmN+fs3rgT7ni6/PX7teKnEHZ/q8Pj4+vfAzuZ+jPFzsLTxanV9eS/rL6+trKzafsE2LkPHP3T" +
+ "/Pezx8evH6rj+Kd0H/sVRzp+MaX8Sfjh5t9Tm+M7nrwVhNd56fNz+063/OOPj2t9p+R3zS+9d2hnXlf9DLN27g/+E6L0E/T" +
+ "/Rp/t5WseX3hnTe9uhmnh35WHLX544XEIAIAAA";
+
+ // Fake up a header
+ HeaderBlock header_block = new HeaderBlock(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
+ header_block.setPropertyStart(0);
+
+ List<ByteBuffer> data_blocks = new ArrayList<>();
+ try (InputStream is = new ByteArrayInputStream(RawDataUtil.decompress(raw_data_array))) {
+ byte[] buf = new byte[header_block.getBigBlockSize().getBigBlockSize()];
+
+ for (int readBytes; (readBytes = is.read(buf)) != -1; ) {
+ data_blocks.add(ByteBuffer.wrap(buf.clone(), 0, readBytes));
+ }
+ }
+
// get property table from the document
PropertyTable table = new PropertyTable(header_block, data_blocks);
assertEquals(30 * 64, table.getRoot().getSize());
int count = 0;
- Property child = null;
+ Property lastChild = null;
for (Property p : table.getRoot()) {
- child = p;
+ assertNotNull(p);
+ lastChild = p;
++count;
}
- assertNotNull("no children found", child);
+ assertNotNull("no children found", lastChild);
assertEquals(1, count);
- assertTrue(child.isDirectory());
+ assertTrue(lastChild.isDirectory());
count = 0;
- for (Property p : (DirectoryProperty) child) {
- child = p;
+ for (Property p : (DirectoryProperty) lastChild) {
+ assertNotNull(p);
++count;
}
assertEquals(35, count);
@RunWith(Suite.class)
@Suite.SuiteClasses({
TestBATBlock.class,
- TestBlockAllocationTableReader.class,
- TestBlockAllocationTableWriter.class,
- TestBlockListImpl.class,
- TestDocumentBlock.class,
- TestHeaderBlockReading.class,
- TestHeaderBlockWriting.class,
- TestPropertyBlock.class,
- TestRawDataBlock.class,
- TestRawDataBlockList.class
+ TestHeaderBlockReading.class
})
public class AllPOIFSStorageTests {
}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.LittleEndian;
-import org.apache.poi.util.LittleEndianConsts;
-
-/**
- * Class LocalRawDataBlockList
- *
- * @author Marc Johnson(mjohnson at apache dot org)
- */
-public final class LocalRawDataBlockList extends RawDataBlockList {
- private final List<RawDataBlock> _list;
- private RawDataBlock[] _array;
-
- public LocalRawDataBlockList()
- throws IOException
- {
- super(new ByteArrayInputStream(new byte[ 0 ]), POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- _list = new ArrayList<>();
- _array = null;
- }
-
- /**
- * create and add a new XBAT block
- *
- * @param start index of first BAT block
- * @param end index of last BAT block
- * @param chain index of next XBAT block
- */
- public void createNewXBATBlock(final int start, final int end,
- final int chain)
- throws IOException
- {
- byte[] data = new byte[ 512 ];
- int offset = 0;
-
- for (int k = start; k <= end; k++)
- {
- LittleEndian.putInt(data, offset, k);
- offset += LittleEndianConsts.INT_SIZE;
- }
- while (offset != 508)
- {
- LittleEndian.putInt(data, offset, -1);
- offset += LittleEndianConsts.INT_SIZE;
- }
- LittleEndian.putInt(data, offset, chain);
- add(new RawDataBlock(new ByteArrayInputStream(data)));
- }
-
- /**
- * create a BAT block and add it to the list
- *
- * @param start_index initial index for the block list
- */
- public void createNewBATBlock(final int start_index)
- throws IOException
- {
- byte[] data = new byte[ 512 ];
- int offset = 0;
-
- for (int j = 0; j < 128; j++)
- {
- int index = start_index + j;
-
- if (index % 256 == 0)
- {
- LittleEndian.putInt(data, offset, -1);
- }
- else if (index % 256 == 255)
- {
- LittleEndian.putInt(data, offset, -2);
- }
- else
- {
- LittleEndian.putInt(data, offset, index + 1);
- }
- offset += LittleEndianConsts.INT_SIZE;
- }
- add(new RawDataBlock(new ByteArrayInputStream(data)));
- }
-
- /**
- * fill the list with dummy blocks
- *
- * @param count of blocks
- */
- public void fill(final int count)
- throws IOException
- {
- int limit = 128 * count;
-
- for (int j = _list.size(); j < limit; j++)
- {
- add(new RawDataBlock(new ByteArrayInputStream(new byte[ 0 ])));
- }
- }
-
- /**
- * add a new block
- *
- * @param block new block to add
- */
- public void add(RawDataBlock block)
- {
- _list.add(block);
- }
-
- /**
- * override of remove method
- *
- * @param index of block to be removed
- *
- * @return desired block
- */
- @Override
- public ListManagedBlock remove(final int index)
- throws IOException
- {
- ensureArrayExists();
- RawDataBlock rvalue = null;
-
- try
- {
- rvalue = _array[ index ];
- if (rvalue == null)
- {
- throw new IOException("index " + index + " is null");
- }
- _array[ index ] = null;
- }
- catch (ArrayIndexOutOfBoundsException ignored)
- {
- throw new IOException("Cannot remove block[ " + index
- + " ]; out of range");
- }
- return rvalue;
- }
-
- /**
- * remove the specified block from the list
- *
- * @param index the index of the specified block; if the index is
- * out of range, that's ok
- */
- @Override
- public void zap(final int index)
- {
- ensureArrayExists();
- if ((index >= 0) && (index < _array.length))
- {
- _array[ index ] = null;
- }
- }
-
- private void ensureArrayExists()
- {
- if (_array == null)
- {
- _array = _list.toArray(new RawDataBlock[ 0 ]);
- }
- }
-
- @Override
- public int blockCount() {
- return _list.size();
- }
-}
package org.apache.poi.poifs.storage;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
-import junit.framework.TestCase;
-
import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
+import org.junit.Test;
/**
* Class to test BATBlock functionality
- *
- * @author Marc Johnson
*/
-public final class TestBATBlock extends TestCase {
-
- /**
- * Test the createBATBlocks method. The test involves setting up
- * various arrays of int's and ensuring that the correct number of
- * BATBlocks is created for each array, and that the data from
- * each array is correctly written to the BATBlocks.
- */
- public void testCreateBATBlocks() throws IOException {
-
- // test 0 length array (basic sanity)
- BATBlock[] rvalue = BATBlock.createBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(0));
-
- assertEquals(0, rvalue.length);
-
- // test array of length 1
- rvalue = BATBlock.createBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(1));
- assertEquals(1, rvalue.length);
- verifyContents(rvalue, 1);
-
- // test array of length 127
- rvalue = BATBlock.createBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(127));
- assertEquals(1, rvalue.length);
- verifyContents(rvalue, 127);
-
- // test array of length 128
- rvalue = BATBlock.createBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(128));
- assertEquals(1, rvalue.length);
- verifyContents(rvalue, 128);
-
- // test array of length 129
- rvalue = BATBlock.createBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(129));
- assertEquals(2, rvalue.length);
- verifyContents(rvalue, 129);
- }
-
- private static int[] createTestArray(int count) {
- int[] rvalue = new int[ count ];
-
- for (int j = 0; j < count; j++)
- {
- rvalue[ j ] = j;
- }
- return rvalue;
- }
-
- private static void verifyContents(BATBlock[] blocks, int entries) throws IOException {
- byte[] expected = new byte[ 512 * blocks.length ];
-
- Arrays.fill(expected, ( byte ) 0xFF);
- int offset = 0;
-
- for (int j = 0; j < entries; j++)
- {
- expected[ offset++ ] = ( byte ) j;
- expected[ offset++ ] = 0;
- expected[ offset++ ] = 0;
- expected[ offset++ ] = 0;
- }
- ByteArrayOutputStream stream = new ByteArrayOutputStream(512
- * blocks.length);
-
- for (BATBlock block : blocks) {
- block.writeBlocks(stream);
- }
- byte[] actual = stream.toByteArray();
-
- assertEquals(expected.length, actual.length);
- for (int j = 0; j < expected.length; j++)
- {
- assertEquals(expected[ j ], actual[ j ]);
- }
- }
-
- public void testCreateXBATBlocks() throws IOException {
- // test 0 length array (basic sanity)
- BATBlock[] rvalue = BATBlock.createXBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(0), 1);
-
- assertEquals(0, rvalue.length);
+public final class TestBATBlock {
- // test array of length 1
- rvalue = BATBlock.createXBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(1), 1);
- assertEquals(1, rvalue.length);
- verifyXBATContents(rvalue, 1, 1);
-
- // test array of length 127
- rvalue = BATBlock.createXBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(127), 1);
- assertEquals(1, rvalue.length);
- verifyXBATContents(rvalue, 127, 1);
-
- // test array of length 128
- rvalue = BATBlock.createXBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(128), 1);
- assertEquals(2, rvalue.length);
- verifyXBATContents(rvalue, 128, 1);
-
- // test array of length 254
- rvalue = BATBlock.createXBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(254), 1);
- assertEquals(2, rvalue.length);
- verifyXBATContents(rvalue, 254, 1);
-
- // test array of length 255
- rvalue = BATBlock.createXBATBlocks(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, createTestArray(255), 1);
- assertEquals(3, rvalue.length);
- verifyXBATContents(rvalue, 255, 1);
- }
-
- private static void verifyXBATContents(BATBlock[] blocks, int entries, int start_block)
- throws IOException {
- byte[] expected = new byte[ 512 * blocks.length ];
-
- Arrays.fill(expected, ( byte ) 0xFF);
- int offset = 0;
-
- for (int j = 0; j < entries; j++)
- {
- if ((j % 127) == 0)
- {
- if (j != 0)
- {
- offset += 4;
- }
- }
- expected[ offset++ ] = ( byte ) j;
- expected[ offset++ ] = 0;
- expected[ offset++ ] = 0;
- expected[ offset++ ] = 0;
- }
- for (int j = 0; j < (blocks.length - 1); j++)
- {
- offset = 508 + (j * 512);
- expected[ offset++ ] = ( byte ) (start_block + j + 1);
- expected[ offset++ ] = 0;
- expected[ offset++ ] = 0;
- expected[ offset++ ] = 0;
- }
- offset = (blocks.length * 512) - 4;
- expected[ offset++ ] = ( byte ) -2;
- expected[ offset++ ] = ( byte ) -1;
- expected[ offset++ ] = ( byte ) -1;
- expected[ offset++ ] = ( byte ) -1;
- ByteArrayOutputStream stream = new ByteArrayOutputStream(512
- * blocks.length);
-
- for (BATBlock block : blocks) {
- block.writeBlocks(stream);
- }
- byte[] actual = stream.toByteArray();
-
- assertEquals(expected.length, actual.length);
- for (int j = 0; j < expected.length; j++)
- {
- assertEquals("offset " + j, expected[ j ], actual[ j ]);
- }
- }
-
- public void testCalculateXBATStorageRequirements() {
- int[] blockCounts = { 0, 1, 127, 128 };
- int[] requirements = { 0, 1, 1, 2 };
-
- for (int j = 0; j < blockCounts.length; j++)
- {
- assertEquals(
- "requirement for " + blockCounts[ j ], requirements[ j ],
- BATBlock.calculateXBATStorageRequirements(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, blockCounts[ j ]));
- }
- }
+ @Test
public void testEntriesPerBlock() {
assertEquals(128, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS.getBATEntriesPerBlock());
}
+
+ @Test
public void testEntriesPerXBATBlock() {
assertEquals(127, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS.getXBATEntriesPerBlock());
}
+
+ @Test
public void testGetXBATChainOffset() {
assertEquals(508, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS.getNextXBATChainOffset());
}
-
- public void testCalculateMaximumSize() throws Exception {
+
+ @Test
+ public void testCalculateMaximumSize() {
// Zero fat blocks isn't technically valid, but it'd be header only
assertEquals(
512,
// Check for >2gb, which we only support via a File
assertEquals(
- 512 + 8030l*512*128,
+ 512 + 8030L * 512 * 128,
BATBlock.calculateMaximumSize(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, 8030)
);
assertEquals(
- 4096 + 8030l*4096*1024,
+ 4096 + 8030L * 4096 * 1024,
BATBlock.calculateMaximumSize(POIFSConstants.LARGER_BIG_BLOCK_SIZE_DETAILS, 8030)
);
}
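
For reference, the figures asserted above follow from the BAT geometry this test class already checks (128 four-byte entries per 512-byte block, 1024 per 4096-byte block): one header block plus one addressable data block per BAT entry. A quick arithmetic sketch, independent of the POI API:

    public class BatSizeArithmetic {
        public static void main(String[] args) {
            // 512-byte blocks: 8030 BAT blocks * 128 entries * 512 bytes each, plus the header
            long small = 512L + 8030L * 128 * 512;     // 526,254,592 bytes (~0.5 GiB)
            // 4096-byte blocks: 8030 BAT blocks * 1024 entries * 4096 bytes each, plus the header
            long large = 4096L + 8030L * 1024 * 4096;  // 33,680,265,216 bytes (~31 GiB, well over 2 GiB)
            System.out.println(small + " / " + large);
        }
    }
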
-
- public void testUsedSectors() throws Exception {
+
+ @Test
+ public void testUsedSectors() {
POIFSBigBlockSize b512 = POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;
POIFSBigBlockSize b4096 = POIFSConstants.LARGER_BIG_BLOCK_SIZE_DETAILS;
// Try first with 512 block sizes, which can hold 128 entries
BATBlock block512 = BATBlock.createEmptyBATBlock(b512, false);
- assertEquals(true, block512.hasFreeSectors());
+ assertTrue(block512.hasFreeSectors());
assertEquals(0, block512.getUsedSectors(false));
// Allocate a few
block512.setValueAt(0, 42);
block512.setValueAt(10, 42);
block512.setValueAt(20, 42);
- assertEquals(true, block512.hasFreeSectors());
+ assertTrue(block512.hasFreeSectors());
assertEquals(3, block512.getUsedSectors(false));
// Allocate all
block512.setValueAt(i, 82);
}
// Check
- assertEquals(false, block512.hasFreeSectors());
+ assertFalse(block512.hasFreeSectors());
assertEquals(128, block512.getUsedSectors(false));
assertEquals(127, block512.getUsedSectors(true));
// Release one
block512.setValueAt(10, POIFSConstants.UNUSED_BLOCK);
- assertEquals(true, block512.hasFreeSectors());
+ assertTrue(block512.hasFreeSectors());
assertEquals(127, block512.getUsedSectors(false));
assertEquals(126, block512.getUsedSectors(true));
// Now repeat with 4096 block sizes
BATBlock block4096 = BATBlock.createEmptyBATBlock(b4096, false);
- assertEquals(true, block4096.hasFreeSectors());
+ assertTrue(block4096.hasFreeSectors());
assertEquals(0, block4096.getUsedSectors(false));
block4096.setValueAt(0, 42);
block4096.setValueAt(10, 42);
block4096.setValueAt(20, 42);
- assertEquals(true, block4096.hasFreeSectors());
+ assertTrue(block4096.hasFreeSectors());
assertEquals(3, block4096.getUsedSectors(false));
// Allocate all
block4096.setValueAt(i, 82);
}
// Check
- assertEquals(false, block4096.hasFreeSectors());
+ assertFalse(block4096.hasFreeSectors());
assertEquals(1024, block4096.getUsedSectors(false));
assertEquals(1023, block4096.getUsedSectors(true));
}
-
- public void testGetBATBlockAndIndex() throws Exception {
+
+ @Test
+ public void testGetBATBlockAndIndex() {
HeaderBlock header = new HeaderBlock(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
List<BATBlock> blocks = new ArrayList<>();
int offset;
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-
-import junit.framework.AssertionFailedError;
-import org.apache.poi.poifs.common.POIFSBigBlockSize;
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.HexRead;
-import org.apache.poi.util.LittleEndian;
-import org.apache.poi.util.LittleEndianConsts;
-import org.junit.Test;
-
-/**
- * Class to test BlockAllocationTableReader functionality
- */
-public class TestBlockAllocationTableReader {
-
- /**
- * Test small block allocation table constructor
- */
- @Test
- public void testSmallBATConstructor() throws IOException {
-
- // need to create an array of raw blocks containing the SBAT,
- // and a small document block list
- final String sbat_data = "H4sIAAAAAAAAAPv/nzjwj4ZYiYGBAZfcKKAtAAC/sexrAAIAAA==";
-
- RawDataBlock[] sbats = { new RawDataBlock(new ByteArrayInputStream(RawDataUtil.decompress(sbat_data))) };
-
- final String sbt_data =
- "H4sIAAAAAAAAAONg0GDISsxNLdYNNTc3Mrc00tUwNNP1Ty7RNTIwMHQAsk0MdY2NNfWiXNwYsAB2MNmg/sgBmyxhQB395AMm" +
- "BkaK9HNQaD83hfqZKXY/E4OCIQcDK0NwYllqCgeDOEOwnkdocLCjp5+Co4KLa5iCv5tbkEKoNwfQrUhJA6TFVM9Yz4gy94OM" +
- "Aac/svVTaj8zg7tTAAX6ZRk0HDWRAkahJF8BiUtQPyMDITX4ABMFegeDfsrjjzLAxCBBoX7KwED7n/LwG2j7KSv/Bt79A2s/" +
- "NdzPQUWaVDDQ/h/o+meop5+hrx9ng4ku9jOhYVIBM4X2j4KhDQAtwD4rAA4AAA==";
-
- InputStream sbt_input = new ByteArrayInputStream(RawDataUtil.decompress(sbt_data));
-
- BlockListImpl small_blocks = new RawDataBlockList(sbt_input, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- int blockCount = small_blocks.blockCount();
- ListManagedBlock[] lmb = new ListManagedBlock[7*blockCount];
- for (int i=0; i<lmb.length; i++) {
- lmb[i] = small_blocks.get(i % blockCount);
- }
- small_blocks.setBlocks(lmb);
-
- BlockAllocationTableReader sbat = new BlockAllocationTableReader(
- POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, sbats, small_blocks);
- int[] nextIndex = {
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -2, -2, -2, -2, -2, -2,
- -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
- -2, 34, -2, -2, -2, -2, -2, -2, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
- };
-
- for (int j = 0; j < 128; j++) {
- final boolean isUsed = nextIndex[j] != -1;
- assertEquals("checking usage of block " + j, isUsed, sbat.isUsed(j));
-
- if (isUsed) {
- assertEquals("checking usage of block " + j, nextIndex[j], sbat.getNextBlockIndex(j));
- small_blocks.remove(j);
- } else {
- try {
- small_blocks.remove(j);
- fail("removing block " + j + " should have failed");
- } catch (IOException ignored) {
- // expected during successful test
- }
- }
- }
- }
-
- @Test
- public void testReadingConstructor() throws IOException {
-
- // create a document, minus the header block, and use that to
- // create a RawDataBlockList. The document will consist entirely
- // of BATBlocks and XBATBlocks
- //
- // we will create two XBAT blocks, which will encompass 128
- // BAT blocks between them, and two extra BAT blocks which
- // will be in the block array passed to the constructor. This
- // makes a total of 130 BAT blocks, which will encompass
- // 16,640 blocks, for a file size of some 8.5 megabytes.
- //
- // Naturally, we'll fake that out ...
- //
- // map of blocks:
- // block 0: xbat block 0
- // block 1: xbat block 1
- // block 2: bat block 0
- // block 3: bat block 1
- // blocks 4-130: bat blocks 2-128, contained in xbat block 0
- // block 131: bat block 129, contained in xbat block 1
- // blocks 132-16639: fictitious blocks, faked out. All blocks
- // whose index is evenly divisible by 256
- // will be unused
- LocalRawDataBlockList list = new LocalRawDataBlockList();
-
- list.createNewXBATBlock(4, 130, 1);
- list.createNewXBATBlock(131, 131, -2);
- for (int j = 0; j < 130; j++) {
- list.createNewBATBlock(j * 128);
- }
- list.fill(132);
- int[] blocks = { 2, 3 };
- BlockAllocationTableReader table = new BlockAllocationTableReader(
- POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, 130, blocks, 2, 0, list);
-
- for (int i = 0; i < (130 * 128); i++) {
- if (i % 256 == 0) {
- assertTrue("verifying block " + i + " is unused", !table.isUsed(i));
- } else if (i % 256 == 255) {
- assertEquals("Verify end of chain for block " + i, POIFSConstants.END_OF_CHAIN,
- table.getNextBlockIndex(i));
- } else {
- assertEquals("Verify next index for block " + i, i + 1, table.getNextBlockIndex(i));
- }
- }
- }
-
- @Test
- public void testFetchBlocks() throws IOException {
-
- // strategy:
- //
- // 1. set up a single BAT block from which to construct a
- // BAT. create nonsense blocks in the raw data block list
- // corresponding to the indices in the BAT block.
- // 2. The indices will include very short documents (0 and 1
- // block in length), longer documents, and some screwed up
- // documents (one with a loop, one that will peek into
- // another document's data, one that includes an unused
- // document, one that includes a reserved (BAT) block, one
- // that includes a reserved (XBAT) block, and one that
- // points off into space somewhere
- LocalRawDataBlockList list = new LocalRawDataBlockList();
- byte[] data = new byte[512];
- int offset = 0;
-
- LittleEndian.putInt(data, offset, -3); // for the BAT block itself
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 1: is at end of file already; start block = -2
- // document 2: has only one block; start block = 1
- LittleEndian.putInt(data, offset, -2);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 3: has a loop in it; start block = 2
- LittleEndian.putInt(data, offset, 2);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 4: peeks into document 2's data; start block = 3
- LittleEndian.putInt(data, offset, 4);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(data, offset, 1);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 5: includes an unused block; start block = 5
- LittleEndian.putInt(data, offset, 6);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(data, offset, -1);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 6: includes a BAT block; start block = 7
- LittleEndian.putInt(data, offset, 8);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(data, offset, 0);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 7: includes an XBAT block; start block = 9
- LittleEndian.putInt(data, offset, 10);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(data, offset, -4);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 8: goes off into space; start block = 11;
- LittleEndian.putInt(data, offset, 1000);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 9: no screw ups; start block = 12;
- int index = 13;
-
- for (; offset < 508; offset += LittleEndianConsts.INT_SIZE) {
- LittleEndian.putInt(data, offset, index++);
- }
- LittleEndian.putInt(data, offset, -2);
- list.add(new RawDataBlock(new ByteArrayInputStream(data)));
- list.fill(1);
- int[] blocks = { 0 };
- BlockAllocationTableReader table = new BlockAllocationTableReader(
- POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, 1, blocks, 0, -2, list);
- int[] start_blocks = { -2, 1, 2, 3, 5, 7, 9, 11, 12 };
- int[] expected_length = { 0, 1, -1, -1, -1, -1, -1, -1, 116 };
-
- for (int j = 0; j < start_blocks.length; j++) {
- try {
- ListManagedBlock[] dataBlocks = table.fetchBlocks(start_blocks[j], -1, list);
-
- if (expected_length[j] == -1) {
- fail("document " + j + " should have failed, but found a length of "
- + dataBlocks.length);
- } else {
- assertEquals(expected_length[j], dataBlocks.length);
- }
- } catch (IOException e) {
- if (expected_length[j] != -1) {
- // -1 would be an expected failure here; anything else is not
- throw e;
- }
- }
- }
- }
-
- /**
- * Bugzilla 48085 describes an error where a corrupted Excel file causes POI to throw an
- * {@link OutOfMemoryError}.
- */
- @Test
- public void testBadSectorAllocationTableSize_bug48085() {
- int BLOCK_SIZE = 512;
- POIFSBigBlockSize bigBlockSize = POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;
- assertEquals(BLOCK_SIZE, bigBlockSize.getBigBlockSize());
-
- // 512 bytes take from the start of bugzilla attachment 24444
- byte[] initData = HexRead.readFromString(
-
- "D0 CF 11 E0 A1 B1 1A E1 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 3E 20 03 20 FE FF 09 20" +
- "06 20 20 20 20 20 20 20 20 20 20 20 01 20 20 20 01 20 20 20 20 20 20 20 20 10 20 20 02 20 20 20" +
- "02 20 20 20 FE FF FF FF 20 20 20 20 20 20 20 20 "
- );
- // the rest of the block is 'FF'
- byte[] data = new byte[BLOCK_SIZE];
- Arrays.fill(data, (byte)0xFF);
- System.arraycopy(initData, 0, data, 0, initData.length);
-
- // similar code to POIFSFileSystem.<init>:
- InputStream stream = new ByteArrayInputStream(data);
- HeaderBlock hb;
- RawDataBlockList dataBlocks;
- try {
- hb = new HeaderBlock(stream);
- dataBlocks = new RawDataBlockList(stream, bigBlockSize);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- try {
- new BlockAllocationTableReader(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,
- hb.getBATCount(), hb.getBATArray(), hb.getXBATCount(),
- hb.getXBATIndex(), dataBlocks);
- } catch (IOException e) {
- // expected during successful test
- assertEquals("Block count 538976257 is too high. POI maximum is 65535.", e.getMessage());
- } catch (OutOfMemoryError e) {
- if (e.getStackTrace()[1].getMethodName().equals("testBadSectorAllocationTableSize")) {
- throw new AssertionFailedError("Identified bug 48085");
- }
- }
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.Arrays;
-
-import junit.framework.TestCase;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.LittleEndian;
-import org.apache.poi.util.LittleEndianConsts;
-
-/**
- * Class to test BlockAllocationTableWriter functionality
- *
- * @author Marc Johnson
- */
-public final class TestBlockAllocationTableWriter extends TestCase {
-
- public void testAllocateSpace() {
- BlockAllocationTableWriter table =
- new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- int[] blockSizes =
- {
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
- };
- int expectedIndex = 0;
-
- for (int blockSize : blockSizes) {
- assertEquals(expectedIndex, table.allocateSpace(blockSize));
- expectedIndex += blockSize;
- }
- }
-
- public void testCreateBlocks() {
- BlockAllocationTableWriter table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
-
- table.allocateSpace(127);
- table.createBlocks();
- verifyBlocksCreated(table, 1);
- table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- table.allocateSpace(128);
- table.createBlocks();
- verifyBlocksCreated(table, 2);
- table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- table.allocateSpace(254);
- table.createBlocks();
- verifyBlocksCreated(table, 2);
- table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- table.allocateSpace(255);
- table.createBlocks();
- verifyBlocksCreated(table, 3);
- table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- table.allocateSpace(13843);
- table.createBlocks();
- verifyBlocksCreated(table, 109);
- table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- table.allocateSpace(13844);
- table.createBlocks();
- verifyBlocksCreated(table, 110);
- table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- table.allocateSpace(13969);
- table.createBlocks();
- verifyBlocksCreated(table, 110);
- table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- table.allocateSpace(13970);
- table.createBlocks();
- verifyBlocksCreated(table, 111);
- }
-
- /**
- * Test content produced by BlockAllocationTableWriter
- */
- public void testProduct() throws IOException {
- BlockAllocationTableWriter table = new BlockAllocationTableWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
-
- for (int k = 1; k <= 22; k++)
- {
- table.allocateSpace(k);
- }
- table.createBlocks();
- ByteArrayOutputStream stream = new ByteArrayOutputStream();
-
- table.writeBlocks(stream);
- byte[] output = stream.toByteArray();
-
- assertEquals(1024, output.length);
- byte[] expected = new byte[ 1024 ];
-
- Arrays.fill(expected, ( byte ) 0xFF);
- int offset = 0;
- int block_index = 1;
-
- for (int k = 1; k <= 22; k++)
- {
- int limit = k - 1;
-
- for (int j = 0; j < limit; j++)
- {
- LittleEndian.putInt(expected, offset, block_index++);
- offset += LittleEndianConsts.INT_SIZE;
- }
- LittleEndian.putInt(expected, offset,
- POIFSConstants.END_OF_CHAIN);
- offset += 4;
- block_index++;
- }
-
- // add BAT block indices
- LittleEndian.putInt(expected, offset, block_index++);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(expected, offset, POIFSConstants.END_OF_CHAIN);
- for (int k = 0; k < expected.length; k++)
- {
- assertEquals("At offset " + k, expected[ k ], output[ k ]);
- }
- }
-
- private static void verifyBlocksCreated(BlockAllocationTableWriter table, int count){
- ByteArrayOutputStream stream = new ByteArrayOutputStream();
-
- try {
- table.writeBlocks(stream);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- byte[] output = stream.toByteArray();
-
- assertEquals(count * 512, output.length);
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.LittleEndian;
-import org.apache.poi.util.LittleEndianConsts;
-import org.junit.Test;
-
-/**
- * Class to test BlockListImpl functionality
- *
- * @author Marc Johnson
- */
-public final class TestBlockListImpl {
- private static final class BlockListTestImpl extends BlockListImpl {
- public BlockListTestImpl() {
- // no extra initialisation
- }
- }
- private static BlockListImpl create() {
- return new BlockListTestImpl();
- }
-
- @Test
- public void testZap() throws IOException {
- BlockListImpl list = create();
-
- // verify that you can zap anything
- for (int j = -2; j < 10; j++)
- {
- list.zap(j);
- }
- RawDataBlock[] blocks = new RawDataBlock[ 5 ];
-
- for (int j = 0; j < 5; j++)
- {
- blocks[ j ] =
- new RawDataBlock(new ByteArrayInputStream(new byte[ 512 ]));
- }
- list.setBlocks(blocks);
- for (int j = -2; j < 10; j++)
- {
- list.zap(j);
- }
-
- // verify that all blocks are gone
- for (int j = 0; j < 5; j++)
- {
- try
- {
- list.remove(j);
- fail("removing item " + j + " should not have succeeded");
- }
- catch (IOException ignored)
- {
- }
- }
- }
-
- @Test
- public void testRemove() throws IOException {
- BlockListImpl list = create();
- RawDataBlock[] blocks = new RawDataBlock[ 5 ];
- byte[] data = new byte[ 512 * 5 ];
-
- for (int j = 0; j < 5; j++)
- {
- Arrays.fill(data, j * 512, (j * 512) + 512, ( byte ) j);
- }
- ByteArrayInputStream stream = new ByteArrayInputStream(data);
-
- for (int j = 0; j < 5; j++)
- {
- blocks[ j ] = new RawDataBlock(stream);
- }
- list.setBlocks(blocks);
-
- // verify that you can't remove illegal indices
- for (int j = -2; j < 10; j++)
- {
- if ((j < 0) || (j >= 5))
- {
- try
- {
- list.remove(j);
- fail("removing item " + j + " should have failed");
- }
- catch (IOException ignored)
- {
- }
- }
- }
-
- // verify we can safely and correctly remove all blocks
- for (int j = 0; j < 5; j++)
- {
- byte[] output = list.remove(j).getData();
-
- for (int k = 0; k < 512; k++)
- {
- assertEquals("testing block " + j + ", index " + k,
- data[ (j * 512) + k ], output[ k ]);
- }
- }
-
- // verify that all blocks are gone
- for (int j = 0; j < 5; j++)
- {
- try
- {
- list.remove(j);
- fail("removing item " + j + " should not have succeeded");
- }
- catch (IOException ignored)
- {
- }
- }
- }
-
- @Test
- public void testSetBAT() throws IOException {
- BlockListImpl list = create();
-
- list.setBAT(null);
- list.setBAT(new BlockAllocationTableReader(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS));
- try
- {
- list.setBAT(new BlockAllocationTableReader(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS));
- fail("second attempt should have failed");
- }
- catch (IOException ignored)
- {
- }
- }
-
- @Test
- public void testFetchBlocks() throws IOException {
-
- // strategy:
- //
- // 1. set up a single BAT block from which to construct a
- // BAT. create nonsense blocks in the raw data block list
- // corresponding to the indices in the BAT block.
- // 2. The indices will include very short documents (0 and 1
- // block in length), longer documents, and some screwed up
- // documents (one with a loop, one that will peek into
- // another document's data, one that includes an unused
- // document, one that includes a reserved (BAT) block, one
- // that includes a reserved (XBAT) block, and one that
- // points off into space somewhere
- BlockListImpl list = create();
- List<RawDataBlock> raw_blocks = new ArrayList<>();
- byte[] data = new byte[ 512 ];
- int offset = 0;
-
- LittleEndian.putInt(data, offset, -3); // for the BAT block itself
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 1: is at end of file already; start block = -2
- // document 2: has only one block; start block = 1
- LittleEndian.putInt(data, offset, -2);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 3: has a loop in it; start block = 2
- LittleEndian.putInt(data, offset, 2);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 4: peeks into document 2's data; start block = 3
- LittleEndian.putInt(data, offset, 4);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(data, offset, 1);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 5: includes an unused block; start block = 5
- LittleEndian.putInt(data, offset, 6);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(data, offset, -1);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 6: includes a BAT block; start block = 7
- LittleEndian.putInt(data, offset, 8);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(data, offset, 0);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 7: includes an XBAT block; start block = 9
- LittleEndian.putInt(data, offset, 10);
- offset += LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt(data, offset, -4);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 8: goes off into space; start block = 11;
- LittleEndian.putInt(data, offset, 1000);
- offset += LittleEndianConsts.INT_SIZE;
-
- // document 9: no screw ups; start block = 12;
- int index = 13;
-
- for (; offset < 508; offset += LittleEndianConsts.INT_SIZE)
- {
- LittleEndian.putInt(data, offset, index++);
- }
- LittleEndian.putInt(data, offset, -2);
- raw_blocks.add(new RawDataBlock(new ByteArrayInputStream(data)));
- for (int j = raw_blocks.size(); j < 128; j++)
- {
- raw_blocks.add(
- new RawDataBlock(new ByteArrayInputStream(new byte[ 0 ])));
- }
- list.setBlocks(raw_blocks.toArray(new RawDataBlock[raw_blocks.size()]));
- int[] blocks =
- {
- 0
- };
- BlockAllocationTableReader table =
- new BlockAllocationTableReader(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS, 1, blocks, 0, -2, list);
- int[] start_blocks =
- {
- -2, 1, 2, 3, 5, 7, 9, 11, 12
- };
- int[] expected_length =
- {
- 0, 1, -1, -1, -1, -1, -1, -1, 116
- };
-
- for (int j = 0; j < start_blocks.length; j++)
- {
- try
- {
- ListManagedBlock[] dataBlocks =
- list.fetchBlocks(start_blocks[ j ], -1);
-
- if (expected_length[ j ] == -1)
- {
- fail("document " + j + " should have failed");
- }
- else
- {
- assertEquals(expected_length[ j ], dataBlocks.length);
- }
- }
- catch (IOException e)
- {
- if (expected_length[ j ] == -1)
- {
-
- // no problem, we expected a failure here
- }
- else
- {
- throw e;
- }
- }
- }
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-
-import junit.framework.TestCase;
-
-/**
- * Class to test DocumentBlock functionality
- *
- * @author Marc Johnson
- */
-public final class TestDocumentBlock extends TestCase {
- static final private byte[] _testdata;
-
- static
- {
- _testdata = new byte[ 2000 ];
- for (int j = 0; j < _testdata.length; j++)
- {
- _testdata[ j ] = ( byte ) j;
- }
- }
-
- /**
- * Test the writing DocumentBlock constructor.
- */
- public void testConstructor()
- throws IOException
- {
- ByteArrayInputStream input = new ByteArrayInputStream(_testdata);
- int index = 0;
- int size = 0;
-
- while (true)
- {
- byte[] data = new byte[ Math.min(_testdata.length - index, 512) ];
-
- System.arraycopy(_testdata, index, data, 0, data.length);
- DocumentBlock block = new DocumentBlock(input, POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
-
- verifyOutput(block, data);
- size += block.size();
- if (block.partiallyRead())
- {
- break;
- }
- index += 512;
- }
- assertEquals(_testdata.length, size);
- }
-
-
- /**
- * Test 'reading' constructor
- */
- public void testReadingConstructor()
- throws IOException
- {
- RawDataBlock input =
- new RawDataBlock(new ByteArrayInputStream(_testdata));
-
- verifyOutput(new DocumentBlock(input), input.getData());
- }
-
- private void verifyOutput(DocumentBlock block, byte [] input)
- throws IOException
- {
- assertEquals(input.length, block.size());
- if (input.length < 512)
- {
- assertTrue(block.partiallyRead());
- }
- else
- {
- assertTrue(!block.partiallyRead());
- }
- ByteArrayOutputStream output = new ByteArrayOutputStream(512);
-
- block.writeBlocks(output);
- byte[] copy = output.toByteArray();
- int j = 0;
-
- for (; j < input.length; j++)
- {
- assertEquals(input[ j ], copy[ j ]);
- }
- for (; j < 512; j++)
- {
- assertEquals(( byte ) 0xFF, copy[ j ]);
- }
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-
-import junit.framework.TestCase;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.LittleEndian;
-import org.apache.poi.util.LittleEndianConsts;
-
-/**
- * Class to test HeaderBlockWriter functionality
- *
- * @author Marc Johnson
- */
-public final class TestHeaderBlockWriting extends TestCase {
-
- private static void confirmEqual(String[] expectedDataHexDumpLines, byte[] actual) {
- byte[] expected = RawDataUtil.decode(expectedDataHexDumpLines);
-
- assertEquals(expected.length, actual.length);
- for (int j = 0; j < expected.length; j++) {
- assertEquals("testing byte " + j, expected[j], actual[j]);
- }
- }
-
- /**
- * Test creating a HeaderBlockWriter
- */
- public void testConstructors() throws IOException {
- HeaderBlockWriter block = new HeaderBlockWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- ByteArrayOutputStream output = new ByteArrayOutputStream(512);
-
- block.writeBlocks(output);
- byte[] copy = output.toByteArray();
- String[] expected = {
- "D0 CF 11 E0 A1 B1 1A E1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3B 00 03 00 FE FF 09 00",
- "06 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 10 00 00 FE FF FF FF",
- "00 00 00 00 FE FF FF FF 00 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- };
-
- confirmEqual(expected, copy);
-
- // verify we can read a 'good' HeaderBlockWriter (also test
- // getPropertyStart)
- block.setPropertyStart(0x87654321);
- output = new ByteArrayOutputStream(512);
- block.writeBlocks(output);
- assertEquals(0x87654321, new HeaderBlock(
- new ByteArrayInputStream(output.toByteArray())).getPropertyStart());
- }
-
- /**
- * Test setting the SBAT start block
- */
- public void testSetSBATStart() throws IOException {
- HeaderBlockWriter block = new HeaderBlockWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
-
- block.setSBATStart(0x01234567);
- ByteArrayOutputStream output = new ByteArrayOutputStream(512);
-
- block.writeBlocks(output);
- byte[] copy = output.toByteArray();
- String[] expected = {
- "D0 CF 11 E0 A1 B1 1A E1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3B 00 03 00 FE FF 09 00",
- "06 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FE FF FF FF 00 00 00 00 00 10 00 00 67 45 23 01",
- "00 00 00 00 FE FF FF FF 00 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- };
- confirmEqual(expected, copy);
- }
-
- /**
- * test setPropertyStart and getPropertyStart
- */
- public void testSetPropertyStart() throws IOException {
- HeaderBlockWriter block = new HeaderBlockWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
-
- block.setPropertyStart(0x01234567);
- ByteArrayOutputStream output = new ByteArrayOutputStream(512);
-
- block.writeBlocks(output);
- byte[] copy = output.toByteArray();
- String[] expected = {
- "D0 CF 11 E0 A1 B1 1A E1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3B 00 03 00 FE FF 09 00",
- "06 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 67 45 23 01 00 00 00 00 00 10 00 00 FE FF FF FF",
- "00 00 00 00 FE FF FF FF 00 00 00 00 FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- };
- confirmEqual(expected, copy);
- }
-
- /**
- * test setting the BAT blocks; also tests getBATCount, getBATArray,
- * getXBATCount
- */
- public void testSetBATBlocks() throws IOException {
-
- // first, a small set of blocks
- HeaderBlockWriter block = new HeaderBlockWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- BATBlock[] xbats = block.setBATBlocks(5, 0x01234567);
-
- assertEquals(0, xbats.length);
- assertEquals(0, HeaderBlockWriter.calculateXBATStorageRequirements(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,5));
- ByteArrayOutputStream output = new ByteArrayOutputStream(512);
-
- block.writeBlocks(output);
- byte[] copy = output.toByteArray();
- String[] expected = {
- "D0 CF 11 E0 A1 B1 1A E1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3B 00 03 00 FE FF 09 00",
- "06 00 00 00 00 00 00 00 00 00 00 00 05 00 00 00 FE FF FF FF 00 00 00 00 00 10 00 00 FE FF FF FF",
- "00 00 00 00 FE FF FF FF 00 00 00 00 67 45 23 01 68 45 23 01 69 45 23 01 6A 45 23 01 6B 45 23 01",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- "FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF",
- };
-
- confirmEqual(expected, copy);
-
- // second, a full set of blocks (109 blocks)
- block = new HeaderBlockWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- xbats = block.setBATBlocks(109, 0x01234567);
- assertEquals(0, xbats.length);
- assertEquals(0, HeaderBlockWriter.calculateXBATStorageRequirements(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,109));
- output = new ByteArrayOutputStream(512);
- block.writeBlocks(output);
- copy = output.toByteArray();
- String[] expected2 = {
- "D0 CF 11 E0 A1 B1 1A E1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3B 00 03 00 FE FF 09 00",
- "06 00 00 00 00 00 00 00 00 00 00 00 6D 00 00 00 FE FF FF FF 00 00 00 00 00 10 00 00 FE FF FF FF",
- "00 00 00 00 FE FF FF FF 00 00 00 00 67 45 23 01 68 45 23 01 69 45 23 01 6A 45 23 01 6B 45 23 01",
- "6C 45 23 01 6D 45 23 01 6E 45 23 01 6F 45 23 01 70 45 23 01 71 45 23 01 72 45 23 01 73 45 23 01",
- "74 45 23 01 75 45 23 01 76 45 23 01 77 45 23 01 78 45 23 01 79 45 23 01 7A 45 23 01 7B 45 23 01",
- "7C 45 23 01 7D 45 23 01 7E 45 23 01 7F 45 23 01 80 45 23 01 81 45 23 01 82 45 23 01 83 45 23 01",
- "84 45 23 01 85 45 23 01 86 45 23 01 87 45 23 01 88 45 23 01 89 45 23 01 8A 45 23 01 8B 45 23 01",
- "8C 45 23 01 8D 45 23 01 8E 45 23 01 8F 45 23 01 90 45 23 01 91 45 23 01 92 45 23 01 93 45 23 01",
- "94 45 23 01 95 45 23 01 96 45 23 01 97 45 23 01 98 45 23 01 99 45 23 01 9A 45 23 01 9B 45 23 01",
- "9C 45 23 01 9D 45 23 01 9E 45 23 01 9F 45 23 01 A0 45 23 01 A1 45 23 01 A2 45 23 01 A3 45 23 01",
- "A4 45 23 01 A5 45 23 01 A6 45 23 01 A7 45 23 01 A8 45 23 01 A9 45 23 01 AA 45 23 01 AB 45 23 01",
- "AC 45 23 01 AD 45 23 01 AE 45 23 01 AF 45 23 01 B0 45 23 01 B1 45 23 01 B2 45 23 01 B3 45 23 01",
- "B4 45 23 01 B5 45 23 01 B6 45 23 01 B7 45 23 01 B8 45 23 01 B9 45 23 01 BA 45 23 01 BB 45 23 01",
- "BC 45 23 01 BD 45 23 01 BE 45 23 01 BF 45 23 01 C0 45 23 01 C1 45 23 01 C2 45 23 01 C3 45 23 01",
- "C4 45 23 01 C5 45 23 01 C6 45 23 01 C7 45 23 01 C8 45 23 01 C9 45 23 01 CA 45 23 01 CB 45 23 01",
- "CC 45 23 01 CD 45 23 01 CE 45 23 01 CF 45 23 01 D0 45 23 01 D1 45 23 01 D2 45 23 01 D3 45 23 01",
- };
- confirmEqual(expected2, copy);
-
- // finally, a really large set of blocks (256 blocks)
- block = new HeaderBlockWriter(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- xbats = block.setBATBlocks(256, 0x01234567);
- assertEquals(2, xbats.length);
- assertEquals(2, HeaderBlockWriter.calculateXBATStorageRequirements(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,256));
- output = new ByteArrayOutputStream(512);
- block.writeBlocks(output);
- copy = output.toByteArray();
- String[] expected3 = {
- "D0 CF 11 E0 A1 B1 1A E1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 3B 00 03 00 FE FF 09 00",
- "06 00 00 00 00 00 00 00 00 00 00 00 00 01 00 00 FE FF FF FF 00 00 00 00 00 10 00 00 FE FF FF FF",
- "00 00 00 00 67 46 23 01 02 00 00 00 67 45 23 01 68 45 23 01 69 45 23 01 6A 45 23 01 6B 45 23 01",
- "6C 45 23 01 6D 45 23 01 6E 45 23 01 6F 45 23 01 70 45 23 01 71 45 23 01 72 45 23 01 73 45 23 01",
- "74 45 23 01 75 45 23 01 76 45 23 01 77 45 23 01 78 45 23 01 79 45 23 01 7A 45 23 01 7B 45 23 01",
- "7C 45 23 01 7D 45 23 01 7E 45 23 01 7F 45 23 01 80 45 23 01 81 45 23 01 82 45 23 01 83 45 23 01",
- "84 45 23 01 85 45 23 01 86 45 23 01 87 45 23 01 88 45 23 01 89 45 23 01 8A 45 23 01 8B 45 23 01",
- "8C 45 23 01 8D 45 23 01 8E 45 23 01 8F 45 23 01 90 45 23 01 91 45 23 01 92 45 23 01 93 45 23 01",
- "94 45 23 01 95 45 23 01 96 45 23 01 97 45 23 01 98 45 23 01 99 45 23 01 9A 45 23 01 9B 45 23 01",
- "9C 45 23 01 9D 45 23 01 9E 45 23 01 9F 45 23 01 A0 45 23 01 A1 45 23 01 A2 45 23 01 A3 45 23 01",
- "A4 45 23 01 A5 45 23 01 A6 45 23 01 A7 45 23 01 A8 45 23 01 A9 45 23 01 AA 45 23 01 AB 45 23 01",
- "AC 45 23 01 AD 45 23 01 AE 45 23 01 AF 45 23 01 B0 45 23 01 B1 45 23 01 B2 45 23 01 B3 45 23 01",
- "B4 45 23 01 B5 45 23 01 B6 45 23 01 B7 45 23 01 B8 45 23 01 B9 45 23 01 BA 45 23 01 BB 45 23 01",
- "BC 45 23 01 BD 45 23 01 BE 45 23 01 BF 45 23 01 C0 45 23 01 C1 45 23 01 C2 45 23 01 C3 45 23 01",
- "C4 45 23 01 C5 45 23 01 C6 45 23 01 C7 45 23 01 C8 45 23 01 C9 45 23 01 CA 45 23 01 CB 45 23 01",
- "CC 45 23 01 CD 45 23 01 CE 45 23 01 CF 45 23 01 D0 45 23 01 D1 45 23 01 D2 45 23 01 D3 45 23 01",
- };
-
- confirmEqual(expected3, copy);
-
- output = new ByteArrayOutputStream(1028);
- xbats[0].writeBlocks(output);
- xbats[1].writeBlocks(output);
- copy = output.toByteArray();
- int correct = 0x012345D4;
- int offset = 0;
- int k = 0;
-
- for (; k < 127; k++) {
- assertEquals("XBAT entry " + k, correct, LittleEndian.getInt(copy, offset));
- correct++;
- offset += LittleEndianConsts.INT_SIZE;
- }
- assertEquals("XBAT Chain", 0x01234567 + 257, LittleEndian.getInt(copy, offset));
- offset += LittleEndianConsts.INT_SIZE;
- k++;
- for (; k < 148; k++) {
- assertEquals("XBAT entry " + k, correct, LittleEndian.getInt(copy, offset));
- correct++;
- offset += LittleEndianConsts.INT_SIZE;
- }
- for (; k < 255; k++) {
- assertEquals("XBAT entry " + k, -1, LittleEndian.getInt(copy, offset));
- offset += LittleEndianConsts.INT_SIZE;
- }
- assertEquals("XBAT End of chain", -2, LittleEndian.getInt(copy, offset));
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.poifs.property.Property;
-import org.junit.Test;
-
-/**
- * Class to test PropertyBlock functionality
- */
-public final class TestPropertyBlock {
-
- @Test
- public void testCreatePropertyBlocks() throws Exception {
-
- // test with 0 properties
- List<Property> properties = new ArrayList<>();
- BlockWritable[] blocks =
- PropertyBlock.createPropertyBlockArray(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,properties);
-
- assertEquals(0, blocks.length);
-
- // test with 1 property
- properties.add(new LocalProperty("Root Entry"));
- blocks = PropertyBlock.createPropertyBlockArray(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,properties);
- assertEquals(1, blocks.length);
- byte[] testblock = new byte[ 512 ];
-
- for (int j = 0; j < 4; j++)
- {
- setDefaultBlock(testblock, j);
- }
- testblock[ 0x0000 ] = ( byte ) 'R';
- testblock[ 0x0002 ] = ( byte ) 'o';
- testblock[ 0x0004 ] = ( byte ) 'o';
- testblock[ 0x0006 ] = ( byte ) 't';
- testblock[ 0x0008 ] = ( byte ) ' ';
- testblock[ 0x000A ] = ( byte ) 'E';
- testblock[ 0x000C ] = ( byte ) 'n';
- testblock[ 0x000E ] = ( byte ) 't';
- testblock[ 0x0010 ] = ( byte ) 'r';
- testblock[ 0x0012 ] = ( byte ) 'y';
- testblock[ 0x0040 ] = ( byte ) 22;
- verifyCorrect(blocks, testblock);
-
- // test with 3 properties
- properties.add(new LocalProperty("workbook"));
- properties.add(new LocalProperty("summary"));
- blocks = PropertyBlock.createPropertyBlockArray(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,properties);
- assertEquals(1, blocks.length);
- testblock[ 0x0080 ] = ( byte ) 'w';
- testblock[ 0x0082 ] = ( byte ) 'o';
- testblock[ 0x0084 ] = ( byte ) 'r';
- testblock[ 0x0086 ] = ( byte ) 'k';
- testblock[ 0x0088 ] = ( byte ) 'b';
- testblock[ 0x008A ] = ( byte ) 'o';
- testblock[ 0x008C ] = ( byte ) 'o';
- testblock[ 0x008E ] = ( byte ) 'k';
- testblock[ 0x00C0 ] = ( byte ) 18;
- testblock[ 0x0100 ] = ( byte ) 's';
- testblock[ 0x0102 ] = ( byte ) 'u';
- testblock[ 0x0104 ] = ( byte ) 'm';
- testblock[ 0x0106 ] = ( byte ) 'm';
- testblock[ 0x0108 ] = ( byte ) 'a';
- testblock[ 0x010A ] = ( byte ) 'r';
- testblock[ 0x010C ] = ( byte ) 'y';
- testblock[ 0x0140 ] = ( byte ) 16;
- verifyCorrect(blocks, testblock);
-
- // test with 4 properties
- properties.add(new LocalProperty("wintery"));
- blocks = PropertyBlock.createPropertyBlockArray(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,properties);
- assertEquals(1, blocks.length);
- testblock[ 0x0180 ] = ( byte ) 'w';
- testblock[ 0x0182 ] = ( byte ) 'i';
- testblock[ 0x0184 ] = ( byte ) 'n';
- testblock[ 0x0186 ] = ( byte ) 't';
- testblock[ 0x0188 ] = ( byte ) 'e';
- testblock[ 0x018A ] = ( byte ) 'r';
- testblock[ 0x018C ] = ( byte ) 'y';
- testblock[ 0x01C0 ] = ( byte ) 16;
- verifyCorrect(blocks, testblock);
-
- // test with 5 properties
- properties.add(new LocalProperty("foo"));
- blocks = PropertyBlock.createPropertyBlockArray(POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS,properties);
- assertEquals(2, blocks.length);
- testblock = new byte[ 1024 ];
- for (int j = 0; j < 8; j++)
- {
- setDefaultBlock(testblock, j);
- }
- testblock[ 0x0000 ] = ( byte ) 'R';
- testblock[ 0x0002 ] = ( byte ) 'o';
- testblock[ 0x0004 ] = ( byte ) 'o';
- testblock[ 0x0006 ] = ( byte ) 't';
- testblock[ 0x0008 ] = ( byte ) ' ';
- testblock[ 0x000A ] = ( byte ) 'E';
- testblock[ 0x000C ] = ( byte ) 'n';
- testblock[ 0x000E ] = ( byte ) 't';
- testblock[ 0x0010 ] = ( byte ) 'r';
- testblock[ 0x0012 ] = ( byte ) 'y';
- testblock[ 0x0040 ] = ( byte ) 22;
- testblock[ 0x0080 ] = ( byte ) 'w';
- testblock[ 0x0082 ] = ( byte ) 'o';
- testblock[ 0x0084 ] = ( byte ) 'r';
- testblock[ 0x0086 ] = ( byte ) 'k';
- testblock[ 0x0088 ] = ( byte ) 'b';
- testblock[ 0x008A ] = ( byte ) 'o';
- testblock[ 0x008C ] = ( byte ) 'o';
- testblock[ 0x008E ] = ( byte ) 'k';
- testblock[ 0x00C0 ] = ( byte ) 18;
- testblock[ 0x0100 ] = ( byte ) 's';
- testblock[ 0x0102 ] = ( byte ) 'u';
- testblock[ 0x0104 ] = ( byte ) 'm';
- testblock[ 0x0106 ] = ( byte ) 'm';
- testblock[ 0x0108 ] = ( byte ) 'a';
- testblock[ 0x010A ] = ( byte ) 'r';
- testblock[ 0x010C ] = ( byte ) 'y';
- testblock[ 0x0140 ] = ( byte ) 16;
- testblock[ 0x0180 ] = ( byte ) 'w';
- testblock[ 0x0182 ] = ( byte ) 'i';
- testblock[ 0x0184 ] = ( byte ) 'n';
- testblock[ 0x0186 ] = ( byte ) 't';
- testblock[ 0x0188 ] = ( byte ) 'e';
- testblock[ 0x018A ] = ( byte ) 'r';
- testblock[ 0x018C ] = ( byte ) 'y';
- testblock[ 0x01C0 ] = ( byte ) 16;
- testblock[ 0x0200 ] = ( byte ) 'f';
- testblock[ 0x0202 ] = ( byte ) 'o';
- testblock[ 0x0204 ] = ( byte ) 'o';
- testblock[ 0x0240 ] = ( byte ) 8;
- verifyCorrect(blocks, testblock);
- }
-
- private static void setDefaultBlock(byte [] testblock, int j)
- {
- int base = j * 128;
- int index = 0;
-
- for (; index < 0x40; index++)
- {
- testblock[ base++ ] = ( byte ) 0;
- }
- testblock[ base++ ] = ( byte ) 2;
- testblock[ base++ ] = ( byte ) 0;
- index += 2;
- for (; index < 0x44; index++)
- {
- testblock[ base++ ] = ( byte ) 0;
- }
- for (; index < 0x50; index++)
- {
- testblock[ base++ ] = ( byte ) 0xff;
- }
- for (; index < 0x80; index++)
- {
- testblock[ base++ ] = ( byte ) 0;
- }
- }
-
- private static void verifyCorrect(BlockWritable[] blocks, byte[] testblock)
- throws IOException {
- ByteArrayOutputStream stream = new ByteArrayOutputStream(512
- * blocks.length);
-
- for (BlockWritable b : blocks) {
- b.writeBlocks(stream);
- }
- byte[] output = stream.toByteArray();
-
- assertEquals(testblock.length, output.length);
- for (int j = 0; j < testblock.length; j++) {
- assertEquals("mismatch at offset " + j, testblock[ j ],
- output[ j ]);
- }
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Random;
-
-import junit.framework.TestCase;
-
-import org.apache.poi.util.DummyPOILogger;
-import org.apache.poi.util.POILogger;
-
-/**
- * Class to test RawDataBlock functionality
- *
- * @author Marc Johnson
- */
-public final class TestRawDataBlock extends TestCase {
- /**
- * Test creating a normal RawDataBlock
- */
- public void testNormalConstructor() throws IOException {
- byte[] data = new byte[ 512 ];
-
- for (int j = 0; j < 512; j++)
- {
- data[ j ] = ( byte ) j;
- }
- RawDataBlock block = new RawDataBlock(new ByteArrayInputStream(data));
-
- assertTrue("Should not be at EOF", !block.eof());
- byte[] out_data = block.getData();
-
- assertEquals("Should be same length", data.length, out_data.length);
- for (int j = 0; j < 512; j++)
- {
- assertEquals("Should be same value at offset " + j, data[ j ],
- out_data[ j ]);
- }
- }
-
- /**
- * Test creating an empty RawDataBlock
- */
- public void testEmptyConstructor() throws IOException {
- byte[] data = new byte[ 0 ];
- RawDataBlock block = new RawDataBlock(new ByteArrayInputStream(data));
-
- assertTrue("Should be at EOF", block.eof());
- try
- {
- block.getData();
- }
- catch (IOException ignored)
- {
-
- // as expected
- }
- }
-
- /**
- * Test creating a short RawDataBlock
- * Will trigger a warning, but no longer an IOException,
- * as people seem to have "valid" truncated files
- */
- public void testShortConstructor() throws Exception {
- // Get the logger to be used
- POILogger oldLogger = RawDataBlock.log;
- DummyPOILogger logger = new DummyPOILogger();
- try {
- RawDataBlock.log = logger;
- assertEquals(0, logger.logged.size());
-
- // Test for various data sizes
- for (int k = 1; k <= 512; k++)
- {
- byte[] data = new byte[ k ];
-
- for (int j = 0; j < k; j++)
- {
- data[ j ] = ( byte ) j;
- }
- RawDataBlock block = null;
-
- logger.reset();
- assertEquals(0, logger.logged.size());
-
- // Have it created
- block = new RawDataBlock(new ByteArrayInputStream(data));
- assertNotNull(block);
-
- // Check that the warning is there for <512
- if(k < 512) {
- assertEquals(
- "Warning on " + k + " byte short block",
- 1, logger.logged.size()
- );
-
- // Build the expected warning message, and check
- String bts = k + " byte";
- if(k > 1) {
- bts += "s";
- }
-
- assertEquals(
- "7 - Unable to read entire block; "+bts+" read before EOF; expected 512 bytes. Your document was either written by software that ignores the spec, or has been truncated!",
- logger.logged.get(0)
- );
- } else {
- assertEquals(0, logger.logged.size());
- }
- }
- } finally {
- RawDataBlock.log = oldLogger;
- }
- }
-
- /**
- * Tests that when using a slow input stream, which
- * won't return a full block at a time, we don't
- * incorrectly think that there's not enough data
- */
- public void testSlowInputStream() throws Exception {
- // Get the logger to be used
- POILogger oldLogger = RawDataBlock.log;
- DummyPOILogger logger = new DummyPOILogger();
- try {
- RawDataBlock.log = logger;
- assertEquals(0, logger.logged.size());
-
- // Test for various ok data sizes
- for (int k = 1; k < 512; k++) {
- byte[] data = new byte[ 512 ];
- for (int j = 0; j < data.length; j++) {
- data[j] = (byte) j;
- }
-
- // Shouldn't complain, as there is enough data,
- // even if it dribbles through
- RawDataBlock block =
- new RawDataBlock(new SlowInputStream(data, k));
- assertFalse(block.eof());
- }
-
- // But if there wasn't enough data available, will
- // complain
- for (int k = 1; k < 512; k++) {
- byte[] data = new byte[ 511 ];
- for (int j = 0; j < data.length; j++) {
- data[j] = (byte) j;
- }
-
- logger.reset();
- assertEquals(0, logger.logged.size());
-
- // Should complain, as there isn't enough data
- RawDataBlock block =
- new RawDataBlock(new SlowInputStream(data, k));
- assertNotNull(block);
- assertEquals(
- "Warning on " + k + " byte short block",
- 1, logger.logged.size()
- );
- }
- } finally {
- RawDataBlock.log = oldLogger;
- }
- }
-
- /**
- * An input stream which will return a maximum of
- * a given number of bytes to read, and often claims
- * not to have any data
- */
- public static class SlowInputStream extends InputStream {
- private final Random rnd = new Random();
- private final byte[] data;
- private final int chunkSize;
- private int pos;
-
- public SlowInputStream(byte[] data, int chunkSize) {
- this.chunkSize = chunkSize;
- this.data = data;
- }
-
- /**
- * 75% of the time, claim there's no data available
- */
- private boolean claimNoData() {
- if(rnd.nextFloat() < 0.25f) {
- return false;
- }
- return true;
- }
-
- @Override
- public int read() {
- if(pos >= data.length) {
- return -1;
- }
- int ret = data[pos];
- pos++;
-
- if(ret < 0) ret += 256;
- return ret;
- }
-
- /**
- * Reads the requested number of bytes, or the chunk
- * size, whichever is lower.
- * Quite often will simply claim to have no data
- */
- @Override
- public int read(byte[] b, int off, int len) {
- // Keep the length within the chunk size
- if(len > chunkSize) {
- len = chunkSize;
- }
- // Don't read off the end of the data
- if(pos + len > data.length) {
- len = data.length - pos;
-
- // Spot when we're out of data
- if(len == 0) {
- return -1;
- }
- }
-
- // 75% of the time, claim there's no data
- if(claimNoData()) {
- return 0;
- }
-
- // Copy, and return what we read
- System.arraycopy(data, pos, b, off, len);
- pos += len;
- return len;
- }
-
- @Override
- public int read(byte[] b) {
- return read(b, 0, b.length);
- }
- }
-}
+++ /dev/null
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-package org.apache.poi.poifs.storage;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import junit.framework.TestCase;
-
-import org.apache.poi.poifs.common.POIFSConstants;
-import org.apache.poi.util.DummyPOILogger;
-import org.apache.poi.util.POILogger;
-
-/**
- * Class to test RawDataBlockList functionality
- *
- * @author Marc Johnson
- */
-public final class TestRawDataBlockList extends TestCase {
- /**
- * Test creating a normal RawDataBlockList
- */
- public void testNormalConstructor() throws IOException {
- byte[] data = new byte[ 2560 ];
-
- for (int j = 0; j < 2560; j++)
- {
- data[ j ] = ( byte ) j;
- }
- new RawDataBlockList(new ByteArrayInputStream(data), POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- }
-
- /**
- * Test creating an empty RawDataBlockList
- */
- public void testEmptyConstructor() throws IOException {
- new RawDataBlockList(new ByteArrayInputStream(new byte[ 0 ]), POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- }
-
- /**
- * Test creating a short RawDataBlockList
- */
- public void testShortConstructor() throws Exception {
- // Get the logger to be used
- POILogger oldLogger = RawDataBlock.log;
- DummyPOILogger logger = new DummyPOILogger();
- try {
- RawDataBlock.log = logger;
- assertEquals(0, logger.logged.size());
-
- // Test for various short sizes
- for (int k = 2049; k < 2560; k++)
- {
- byte[] data = new byte[ k ];
-
- for (int j = 0; j < k; j++)
- {
- data[ j ] = ( byte ) j;
- }
-
- // Check we logged the error
- logger.reset();
- new RawDataBlockList(new ByteArrayInputStream(data), POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS);
- assertEquals(1, logger.logged.size());
- }
- } finally {
- RawDataBlock.log = oldLogger;
- }
- }
-}
import java.io.InputStream;
import org.apache.poi.POIDataSamples;
-import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
+import org.apache.poi.poifs.filesystem.POIFSFileSystem;
-public class BaseTestSlideShowFactory {
+public abstract class BaseTestSlideShowFactory {
private static final POIDataSamples _slTests = POIDataSamples.getSlideShowInstance();
@SuppressWarnings("resource")
SlideShow<?,?> ss;
- // from NPOIFS
+ // from POIFS
if (file.endsWith(".ppt")) {
- NPOIFSFileSystem npoifs = new NPOIFSFileSystem(fromFile(file));
+ POIFSFileSystem npoifs = new POIFSFileSystem(fromFile(file));
ss = SlideShowFactory.create(npoifs);
assertNotNull(ss);
npoifs.close();
- // from protected NPOIFS
+ // from protected POIFS
if (protectedFile.endsWith(".ppt") || protectedFile.endsWith(".pptx")) {
- NPOIFSFileSystem npoifs = new NPOIFSFileSystem(fromFile(protectedFile));
+ POIFSFileSystem npoifs = new POIFSFileSystem(fromFile(protectedFile));
ss = SlideShowFactory.create(npoifs, password);
assertNotNull(ss);
npoifs.close();
}
}
- public static void testFactory(String file, String protectedFile, String password)
+ @SuppressWarnings("SameParameterValue")
+ protected static void testFactory(String file, String protectedFile, String password)
throws Exception {
testFactoryFromFile(file);
testFactoryFromStream(file);