
#62649 - Remove OPOIFS* / rename NPOIFS* to POIFS*

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1839709 13f79535-47bb-0310-9956-ffa450edef68
tags/REL_4_0_0_FINAL
Andreas Beeker 5 years ago
parent commit 8ebfec4808
100 changed files with 2130 additions and 4990 deletions
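For code outside POI, the practical effect of this change is a class rename: callers that previously constructed org.apache.poi.poifs.filesystem.NPOIFSFileSystem now construct POIFSFileSystem with the same arguments, as the per-file diffs below show. A minimal caller-side sketch of the pattern (file names are illustrative, not taken from the commit):

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;

import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class PoifsRenameSketch {
    public static void main(String[] args) throws Exception {
        // Before 4.0.0: new NPOIFSFileSystem(file, false); after: same constructor, renamed class.
        try (POIFSFileSystem poifs = new POIFSFileSystem(new File(args[0]), false)) {
            DirectoryEntry root = poifs.getRoot();   // unchanged API
            System.out.println("Root entry has " + root.getEntryCount() + " children");
        }

        // Creating an empty filesystem and writing it out also changes only the class name.
        try (POIFSFileSystem fs = new POIFSFileSystem();
             OutputStream out = new FileOutputStream("empty.ole2")) {
            fs.writeFilesystem(out);
        }
    }
}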
  1. +3 -3  src/examples/src/org/apache/poi/hpsf/examples/ModifyDocumentSummaryInformation.java
  2. +13 -22  src/java/org/apache/poi/POIDocument.java
  3. +2 -2  src/java/org/apache/poi/POIReadOnlyDocument.java
  4. +2 -7  src/java/org/apache/poi/extractor/OLE2ExtractorFactory.java
  5. +3 -7  src/java/org/apache/poi/hpsf/HPSFPropertiesOnlyDocument.java
  6. +1 -5  src/java/org/apache/poi/hpsf/extractor/HPSFPropertiesExtractor.java
  7. +17 -26  src/java/org/apache/poi/hssf/dev/BiffViewer.java
  8. +11 -26  src/java/org/apache/poi/hssf/dev/EFBiffViewer.java
  9. +2 -3  src/java/org/apache/poi/hssf/dev/FormulaViewer.java
  10. +36 -45  src/java/org/apache/poi/hssf/dev/RecordLister.java
  11. +5 -5  src/java/org/apache/poi/hssf/extractor/OldExcelExtractor.java
  12. +18 -33  src/java/org/apache/poi/hssf/usermodel/HSSFWorkbook.java
  13. +3 -2  src/java/org/apache/poi/hssf/usermodel/HSSFWorkbookFactory.java
  14. +11 -16  src/java/org/apache/poi/poifs/crypt/ChunkedCipherOutputStream.java
  15. +1 -6  src/java/org/apache/poi/poifs/crypt/Decryptor.java
  16. +1 -9  src/java/org/apache/poi/poifs/crypt/EncryptionInfo.java
  17. +0 -4  src/java/org/apache/poi/poifs/crypt/Encryptor.java
  18. +10 -33  src/java/org/apache/poi/poifs/crypt/cryptoapi/CryptoAPIEncryptor.java
  19. +3 -6  src/java/org/apache/poi/poifs/crypt/xor/XOREncryptor.java
  20. +14 -27  src/java/org/apache/poi/poifs/dev/POIFSDump.java
  21. +7 -8  src/java/org/apache/poi/poifs/dev/POIFSLister.java
  22. +8 -9  src/java/org/apache/poi/poifs/dev/POIFSViewer.java
  23. +10 -10  src/java/org/apache/poi/poifs/eventfilesystem/POIFSReader.java
  24. +9 -9  src/java/org/apache/poi/poifs/filesystem/DirectoryNode.java
  25. +5 -2  src/java/org/apache/poi/poifs/filesystem/DocumentFactoryHelper.java
  26. +288 -97  src/java/org/apache/poi/poifs/filesystem/DocumentInputStream.java
  27. +2 -2  src/java/org/apache/poi/poifs/filesystem/DocumentNode.java
  28. +125 -115  src/java/org/apache/poi/poifs/filesystem/DocumentOutputStream.java
  29. +2 -2  src/java/org/apache/poi/poifs/filesystem/EntryUtils.java
  30. +0 -330  src/java/org/apache/poi/poifs/filesystem/NDocumentInputStream.java
  31. +0 -163  src/java/org/apache/poi/poifs/filesystem/NDocumentOutputStream.java
  32. +0 -947  src/java/org/apache/poi/poifs/filesystem/NPOIFSFileSystem.java
  33. +22 -24  src/java/org/apache/poi/poifs/filesystem/POIFSDocument.java
  34. +880 -54  src/java/org/apache/poi/poifs/filesystem/POIFSFileSystem.java
  35. +13 -14  src/java/org/apache/poi/poifs/filesystem/POIFSMiniStore.java
  36. +15 -16  src/java/org/apache/poi/poifs/filesystem/POIFSStream.java
  37. +6 -6  src/java/org/apache/poi/poifs/macros/VBAMacroReader.java
  38. +4 -4  src/java/org/apache/poi/poifs/property/DocumentProperty.java
  39. +0 -164  src/java/org/apache/poi/poifs/property/NPropertyTable.java
  40. +3 -30  src/java/org/apache/poi/poifs/property/PropertyFactory.java
  41. +190 -57  src/java/org/apache/poi/poifs/property/PropertyTable.java
  42. +0 -174  src/java/org/apache/poi/poifs/property/PropertyTableBase.java
  43. +31 -167  src/java/org/apache/poi/poifs/storage/BATBlock.java
  44. +0 -103  src/java/org/apache/poi/poifs/storage/BigBlock.java
  45. +0 -320  src/java/org/apache/poi/poifs/storage/BlockAllocationTableReader.java
  46. +0 -186  src/java/org/apache/poi/poifs/storage/BlockAllocationTableWriter.java
  47. +0 -83  src/java/org/apache/poi/poifs/storage/BlockList.java
  48. +0 -161  src/java/org/apache/poi/poifs/storage/BlockListImpl.java
  49. +0 -186  src/java/org/apache/poi/poifs/storage/DataInputBlock.java
  50. +0 -204  src/java/org/apache/poi/poifs/storage/DocumentBlock.java
  51. +4 -5  src/java/org/apache/poi/poifs/storage/HeaderBlock.java
  52. +0 -195  src/java/org/apache/poi/poifs/storage/HeaderBlockWriter.java
  53. +0 -45  src/java/org/apache/poi/poifs/storage/ListManagedBlock.java
  54. +0 -127  src/java/org/apache/poi/poifs/storage/PropertyBlock.java
  55. +0 -151  src/java/org/apache/poi/poifs/storage/RawDataBlock.java
  56. +0 -70  src/java/org/apache/poi/poifs/storage/RawDataBlockList.java
  57. +8 -8  src/java/org/apache/poi/sl/usermodel/SlideShowFactory.java
  58. +8 -8  src/java/org/apache/poi/ss/usermodel/WorkbookFactory.java
  59. +6 -4  src/java/org/apache/poi/util/DrawingDump.java
  60. +5 -9  src/ooxml/java/org/apache/poi/ooxml/extractor/ExtractorFactory.java
  61. +1 -1  src/ooxml/java/org/apache/poi/xssf/usermodel/XSSFCellStyle.java
  62. +1 -2  src/ooxml/testcases/org/apache/poi/poifs/crypt/TestDecryptor.java
  63. +10 -11  src/ooxml/testcases/org/apache/poi/poifs/crypt/TestEncryptor.java
  64. +16 -23  src/ooxml/testcases/org/apache/poi/xslf/usermodel/TestXSLFSlideShowFactory.java
  65. +20 -23  src/ooxml/testcases/org/apache/poi/xssf/usermodel/TestXSSFBugs.java
  66. +3 -3  src/ooxml/testcases/org/apache/poi/xwpf/TestXWPFBugs.java
  67. +8 -17  src/scratchpad/src/org/apache/poi/hdgf/HDGFDiagram.java
  68. +2 -2  src/scratchpad/src/org/apache/poi/hdgf/dev/VSDDumper.java
  69. +5 -7  src/scratchpad/src/org/apache/poi/hdgf/extractor/VisioTextExtractor.java
  70. +2 -5  src/scratchpad/src/org/apache/poi/hpbf/HPBFDocument.java
  71. +5 -5  src/scratchpad/src/org/apache/poi/hpbf/dev/HPBFDumper.java
  72. +3 -3  src/scratchpad/src/org/apache/poi/hpbf/dev/PLCDumper.java
  73. +14 -21  src/scratchpad/src/org/apache/poi/hpbf/extractor/PublisherTextExtractor.java
  74. +9 -9  src/scratchpad/src/org/apache/poi/hslf/dev/PPTXMLDump.java
  75. +3 -3  src/scratchpad/src/org/apache/poi/hslf/dev/SlideShowDumper.java
  76. +0 -10  src/scratchpad/src/org/apache/poi/hslf/extractor/PowerPointExtractor.java
  77. +5 -5  src/scratchpad/src/org/apache/poi/hslf/extractor/QuickButCruddyTextExtractor.java
  78. +8 -8  src/scratchpad/src/org/apache/poi/hslf/record/CurrentUserAtom.java
  79. +33 -36  src/scratchpad/src/org/apache/poi/hslf/usermodel/HSLFSlideShow.java
  80. +3 -2  src/scratchpad/src/org/apache/poi/hslf/usermodel/HSLFSlideShowFactory.java
  81. +23 -46  src/scratchpad/src/org/apache/poi/hslf/usermodel/HSLFSlideShowImpl.java
  82. +5 -5  src/scratchpad/src/org/apache/poi/hsmf/MAPIMessage.java
  83. +4 -4  src/scratchpad/src/org/apache/poi/hsmf/dev/HSMFDump.java
  84. +12 -22  src/scratchpad/src/org/apache/poi/hsmf/extractor/OutlookTextExtactor.java
  85. +5 -9  src/scratchpad/src/org/apache/poi/hsmf/parsers/POIFSChunkParser.java
  86. +23 -30  src/scratchpad/src/org/apache/poi/hwpf/HWPFDocument.java
  87. +2 -3  src/scratchpad/testcases/org/apache/poi/TestPOIDocumentScratchpad.java
  88. +2 -4  src/scratchpad/testcases/org/apache/poi/hpbf/extractor/TestPublisherTextExtractor.java
  89. +2 -3  src/scratchpad/testcases/org/apache/poi/hslf/extractor/TestExtractor.java
  90. +7 -10  src/scratchpad/testcases/org/apache/poi/hslf/record/TestDocumentEncryption.java
  91. +15 -20  src/scratchpad/testcases/org/apache/poi/hslf/usermodel/TestBugs.java
  92. +6 -6  src/scratchpad/testcases/org/apache/poi/hslf/usermodel/TestRichTextRun.java
  93. +7 -7  src/scratchpad/testcases/org/apache/poi/hsmf/TestFixedSizedProperties.java
  94. +16 -12  src/scratchpad/testcases/org/apache/poi/hsmf/extractor/TestOutlookTextExtractor.java
  95. +14 -12  src/scratchpad/testcases/org/apache/poi/hsmf/parsers/TestPOIFSChunkParser.java
  96. +2 -3  src/scratchpad/testcases/org/apache/poi/hwpf/extractor/TestWordExtractor.java
  97. +2 -2  src/scratchpad/testcases/org/apache/poi/hwpf/extractor/TestWordExtractorBugs.java
  98. +22 -36  src/scratchpad/testcases/org/apache/poi/hwpf/usermodel/TestBugs.java
  99. +3 -4  src/scratchpad/testcases/org/apache/poi/hwpf/usermodel/TestHWPFWrite.java
  100. +0 -0  src/testcases/org/apache/poi/TestPOIDocumentMain.java

+3 -3  src/examples/src/org/apache/poi/hpsf/examples/ModifyDocumentSummaryInformation.java

@@ -26,7 +26,7 @@ import org.apache.poi.hpsf.DocumentSummaryInformation;
import org.apache.poi.hpsf.PropertySetFactory;
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
* <p>This is a sample application showing how to easily modify properties in
@@ -79,7 +79,7 @@ public class ModifyDocumentSummaryInformation {
File summaryFile = new File(args[0]);

/* Open the POI filesystem. */
try (NPOIFSFileSystem poifs = new NPOIFSFileSystem(summaryFile, false)) {
try (POIFSFileSystem poifs = new POIFSFileSystem(summaryFile, false)) {

/* Read the summary information. */
DirectoryEntry dir = poifs.getRoot();
@@ -128,7 +128,7 @@ public class ModifyDocumentSummaryInformation {
/* Insert some custom properties into the container. */
customProperties.put("Key 1", "Value 1");
customProperties.put("Schl\u00fcssel 2", "Wert 2");
customProperties.put("Sample Number", new Integer(12345));
customProperties.put("Sample Number", 12345);
customProperties.put("Sample Boolean", Boolean.TRUE);
customProperties.put("Sample Date", new Date());


+13 -22  src/java/org/apache/poi/POIDocument.java

@@ -39,7 +39,6 @@ import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIDecryptor;
import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIEncryptor;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Internal;
@@ -74,14 +73,6 @@ public abstract class POIDocument implements Closeable {
this.directory = dir;
}

/**
* Constructs from an old-style OPOIFS
*
* @param fs the filesystem the document is read from
*/
protected POIDocument(NPOIFSFileSystem fs) {
this(fs.getRoot());
}
/**
* Constructs from the default POIFS
*
@@ -203,7 +194,7 @@ public abstract class POIDocument implements Closeable {
protected PropertySet getPropertySet(String setName, EncryptionInfo encryptionInfo) throws IOException {
DirectoryNode dirNode = directory;
NPOIFSFileSystem encPoifs = null;
POIFSFileSystem encPoifs = null;
String step = "getting";
try {
if (encryptionInfo != null && encryptionInfo.isDocPropsEncrypted()) {
@@ -243,7 +234,7 @@ public abstract class POIDocument implements Closeable {
* into the currently open NPOIFSFileSystem
*
* @throws IOException if an error when writing to the open
* {@link NPOIFSFileSystem} occurs
* {@link POIFSFileSystem} occurs
*/
protected void writeProperties() throws IOException {
validateInPlaceWritePossible();
@@ -255,9 +246,9 @@ public abstract class POIDocument implements Closeable {
* @param outFS the POIFSFileSystem to write the properties into
*
* @throws IOException if an error when writing to the
* {@link NPOIFSFileSystem} occurs
* {@link POIFSFileSystem} occurs
*/
protected void writeProperties(NPOIFSFileSystem outFS) throws IOException {
protected void writeProperties(POIFSFileSystem outFS) throws IOException {
writeProperties(outFS, null);
}
/**
@@ -266,13 +257,13 @@ public abstract class POIDocument implements Closeable {
* @param writtenEntries a list of POIFS entries to add the property names too
*
* @throws IOException if an error when writing to the
* {@link NPOIFSFileSystem} occurs
* {@link POIFSFileSystem} occurs
*/
protected void writeProperties(NPOIFSFileSystem outFS, List<String> writtenEntries) throws IOException {
protected void writeProperties(POIFSFileSystem outFS, List<String> writtenEntries) throws IOException {
final EncryptionInfo ei = getEncryptionInfo();
final boolean encryptProps = (ei != null && ei.isDocPropsEncrypted());
try (NPOIFSFileSystem tmpFS = new NPOIFSFileSystem()) {
final NPOIFSFileSystem fs = (encryptProps) ? tmpFS : outFS;
try (POIFSFileSystem tmpFS = new POIFSFileSystem()) {
final POIFSFileSystem fs = (encryptProps) ? tmpFS : outFS;

writePropertySet(SummaryInformation.DEFAULT_STREAM_NAME, getSummaryInformation(), fs, writtenEntries);
writePropertySet(DocumentSummaryInformation.DEFAULT_STREAM_NAME, getDocumentSummaryInformation(), fs, writtenEntries);
@@ -302,7 +293,7 @@ public abstract class POIDocument implements Closeable {
}
}

private void writePropertySet(String name, PropertySet ps, NPOIFSFileSystem outFS, List<String> writtenEntries)
private void writePropertySet(String name, PropertySet ps, POIFSFileSystem outFS, List<String> writtenEntries)
throws IOException {
if (ps == null) {
return;
@@ -320,9 +311,9 @@ public abstract class POIDocument implements Closeable {
* @param outFS the NPOIFSFileSystem to write the property into
*
* @throws IOException if an error when writing to the
* {@link NPOIFSFileSystem} occurs
* {@link POIFSFileSystem} occurs
*/
private void writePropertySet(String name, PropertySet set, NPOIFSFileSystem outFS) throws IOException {
private void writePropertySet(String name, PropertySet set, POIFSFileSystem outFS) throws IOException {
try {
PropertySet mSet = new PropertySet(set);
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
@@ -411,7 +402,7 @@ public abstract class POIDocument implements Closeable {
public abstract void write(OutputStream out) throws IOException;

/**
* Closes the underlying {@link NPOIFSFileSystem} from which
* Closes the underlying {@link POIFSFileSystem} from which
* the document was read, if any. Has no effect on documents
* opened from an InputStream, or newly created ones.<p>
*
@@ -451,7 +442,7 @@ public abstract class POIDocument implements Closeable {
@Internal
protected boolean initDirectory() {
if (directory == null) {
directory = new NPOIFSFileSystem().getRoot(); // NOSONAR
directory = new POIFSFileSystem().getRoot(); // NOSONAR
return true;
}
return false;

+2 -2  src/java/org/apache/poi/POIReadOnlyDocument.java

@@ -20,7 +20,7 @@ import java.io.File;
import java.io.OutputStream;

import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;


/**
@@ -33,7 +33,7 @@ public abstract class POIReadOnlyDocument extends POIDocument {
protected POIReadOnlyDocument(DirectoryNode dir) {
super(dir);
}
protected POIReadOnlyDocument(NPOIFSFileSystem fs) {
protected POIReadOnlyDocument(POIFSFileSystem fs) {
super(fs);
}


+2 -7  src/java/org/apache/poi/extractor/OLE2ExtractorFactory.java

@@ -32,7 +32,6 @@ import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
@@ -49,7 +48,7 @@ import org.apache.poi.util.POILogger;
* <p>Note 3 - rather than using this, for most cases you would be better
* off switching to <a href="http://tika.apache.org">Apache Tika</a> instead!</p>
*/
@SuppressWarnings("WeakerAccess")
@SuppressWarnings({"WeakerAccess", "JavadocReference"})
public final class OLE2ExtractorFactory {
private static final POILogger LOGGER = POILogFactory.getLogger(OLE2ExtractorFactory.class);
@@ -111,10 +110,6 @@ public final class OLE2ExtractorFactory {
public static <T extends POITextExtractor> T createExtractor(POIFSFileSystem fs) throws IOException {
return (T)createExtractor(fs.getRoot());
}
@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(NPOIFSFileSystem fs) throws IOException {
return (T)createExtractor(fs.getRoot());
}

@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(InputStream input) throws IOException {
@@ -131,7 +126,7 @@ public final class OLE2ExtractorFactory {
}
} else {
// Best hope it's OLE2....
return createExtractor(new NPOIFSFileSystem(input));
return createExtractor(new POIFSFileSystem(input));
}
}


+3 -7  src/java/org/apache/poi/hpsf/HPSFPropertiesOnlyDocument.java

@@ -25,7 +25,6 @@ import java.util.List;
import org.apache.poi.POIDocument;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.FilteringDirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
@@ -35,9 +34,6 @@ import org.apache.poi.poifs.filesystem.POIFSFileSystem;
* without affecting the rest of the file
*/
public class HPSFPropertiesOnlyDocument extends POIDocument {
public HPSFPropertiesOnlyDocument(NPOIFSFileSystem fs) {
super(fs.getRoot());
}
public HPSFPropertiesOnlyDocument(POIFSFileSystem fs) {
super(fs);
}
@@ -46,7 +42,7 @@ public class HPSFPropertiesOnlyDocument extends POIDocument {
* Write out to the currently open file the properties changes, but nothing else
*/
public void write() throws IOException {
NPOIFSFileSystem fs = getDirectory().getFileSystem();
POIFSFileSystem fs = getDirectory().getFileSystem();
validateInPlaceWritePossible();
writeProperties(fs, null);
@@ -65,13 +61,13 @@ public class HPSFPropertiesOnlyDocument extends POIDocument {
* Write out, with any properties changes, but nothing else
*/
public void write(OutputStream out) throws IOException {
try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
try (POIFSFileSystem fs = new POIFSFileSystem()) {
write(fs);
fs.writeFilesystem(out);
}
}
private void write(NPOIFSFileSystem fs) throws IOException {
private void write(POIFSFileSystem fs) throws IOException {
// For tracking what we've written out, so far
List<String> excepts = new ArrayList<>(2);


+1 -5  src/java/org/apache/poi/hpsf/extractor/HPSFPropertiesExtractor.java

@@ -30,7 +30,6 @@ import org.apache.poi.hpsf.Property;
import org.apache.poi.hpsf.PropertySet;
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hpsf.wellknown.PropertyIDMap;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
@@ -48,9 +47,6 @@ public class HPSFPropertiesExtractor extends POIOLE2TextExtractor {
public HPSFPropertiesExtractor(POIFSFileSystem fs) {
super(new HPSFPropertiesOnlyDocument(fs));
}
public HPSFPropertiesExtractor(NPOIFSFileSystem fs) {
super(new HPSFPropertiesOnlyDocument(fs));
}

public String getDocumentSummaryInformationText() {
if(document == null) { // event based extractor does not have a document
@@ -144,7 +140,7 @@ public class HPSFPropertiesExtractor extends POIOLE2TextExtractor {
public static void main(String[] args) throws IOException {
for (String file : args) {
try (HPSFPropertiesExtractor ext = new HPSFPropertiesExtractor(
new NPOIFSFileSystem(new File(file)))) {
new POIFSFileSystem(new File(file)))) {
System.out.println(ext.getText());
}
}

+17 -26  src/java/org/apache/poi/hssf/dev/BiffViewer.java

@@ -19,7 +19,6 @@ package org.apache.poi.hssf.dev;

import java.io.DataInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -42,7 +41,7 @@ import org.apache.poi.hssf.record.pivottable.ViewDefinitionRecord;
import org.apache.poi.hssf.record.pivottable.ViewFieldsRecord;
import org.apache.poi.hssf.record.pivottable.ViewSourceRecord;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
@@ -71,13 +70,10 @@ public final class BiffViewer {
* @param recListener the record listener to notify about read records
* @param dumpInterpretedRecords if {@code true}, the read records will be written to the PrintWriter
*
* @return an array of Records created from the InputStream
* @exception org.apache.poi.util.RecordFormatException on error processing the InputStream
*/
public static Record[] createRecords(InputStream is, PrintWriter ps, BiffRecordListener recListener, boolean dumpInterpretedRecords)
private static void createRecords(InputStream is, PrintWriter ps, BiffRecordListener recListener, boolean dumpInterpretedRecords)
throws org.apache.poi.util.RecordFormatException {
List<Record> temp = new ArrayList<>();

RecordInputStream recStream = new RecordInputStream(is);
while (true) {
boolean hasNext;
@@ -101,7 +97,6 @@ public final class BiffViewer {
if (record.getSid() == ContinueRecord.sid) {
continue;
}
temp.add(record);

for (String header : recListener.getRecentHeaders()) {
ps.println(header);
@@ -112,9 +107,6 @@ public final class BiffViewer {
}
ps.println();
}
Record[] result = new Record[temp.size()];
temp.toArray(result);
return result;
}


@@ -349,19 +341,19 @@ public final class BiffViewer {
}
return new CommandArgs(biffhex, noint, out, rawhex, noheader, file);
}
public boolean shouldDumpBiffHex() {
boolean shouldDumpBiffHex() {
return _biffhex;
}
public boolean shouldDumpRecordInterpretations() {
boolean shouldDumpRecordInterpretations() {
return !_noint;
}
public boolean shouldOutputToFile() {
boolean shouldOutputToFile() {
return _out;
}
public boolean shouldOutputRawHexOnly() {
boolean shouldOutputRawHexOnly() {
return _rawhex;
}
public boolean suppressHeader() {
boolean suppressHeader() {
return _noHeader;
}
public File getFile() {
@@ -369,7 +361,7 @@ public final class BiffViewer {
}
}
private static final class CommandParseException extends Exception {
public CommandParseException(String msg) {
CommandParseException(String msg) {
super(msg);
}
}
@@ -410,10 +402,10 @@ public final class BiffViewer {
pw = new PrintWriter(new OutputStreamWriter(System.out, Charset.defaultCharset()));
}

NPOIFSFileSystem fs = null;
POIFSFileSystem fs = null;
InputStream is = null;
try {
fs = new NPOIFSFileSystem(cmdArgs.getFile(), true);
fs = new POIFSFileSystem(cmdArgs.getFile(), true);
is = getPOIFSInputStream(fs);

if (cmdArgs.shouldOutputRawHexOnly()) {
@@ -432,13 +424,12 @@ public final class BiffViewer {
}
}

protected static InputStream getPOIFSInputStream(NPOIFSFileSystem fs)
throws IOException, FileNotFoundException {
static InputStream getPOIFSInputStream(POIFSFileSystem fs) throws IOException {
String workbookName = HSSFWorkbook.getWorkbookDirEntryName(fs.getRoot());
return fs.createDocumentInputStream(workbookName);
}

protected static void runBiffViewer(PrintWriter pw, InputStream is,
static void runBiffViewer(PrintWriter pw, InputStream is,
boolean dumpInterpretedRecords, boolean dumpHex, boolean zeroAlignHexDump,
boolean suppressHeader) {
BiffRecordListener recListener = new BiffRecordListener(dumpHex ? pw : null, zeroAlignHexDump, suppressHeader);
@@ -451,7 +442,7 @@ public final class BiffViewer {
private List<String> _headers;
private final boolean _zeroAlignEachRecord;
private final boolean _noHeader;
public BiffRecordListener(Writer hexDumpWriter, boolean zeroAlignEachRecord, boolean noHeader) {
private BiffRecordListener(Writer hexDumpWriter, boolean zeroAlignEachRecord, boolean noHeader) {
_hexDumpWriter = hexDumpWriter;
_zeroAlignEachRecord = zeroAlignEachRecord;
_noHeader = noHeader;
@@ -477,7 +468,7 @@ public final class BiffViewer {
}
}
}
public List<String> getRecentHeaders() {
private List<String> getRecentHeaders() {
List<String> result = _headers;
_headers = new ArrayList<>();
return result;
@@ -510,7 +501,7 @@ public final class BiffViewer {
private int _currentSize;
private boolean _innerHasReachedEOF;

public BiffDumpingStream(InputStream is, IBiffRecordListener listener) {
private BiffDumpingStream(InputStream is, IBiffRecordListener listener) {
_is = new DataInputStream(is);
_listener = listener;
_data = new byte[RecordInputStream.MAX_RECORD_DATA_SIZE + 4];
@@ -601,7 +592,7 @@ public final class BiffViewer {
* @param globalOffset (somewhat arbitrary) used to calculate the addresses printed at the
* start of each line
*/
static void hexDumpAligned(Writer w, byte[] data, int dumpLen, int globalOffset,
private static void hexDumpAligned(Writer w, byte[] data, int dumpLen, int globalOffset,
boolean zeroAlignEachRecord) {
int baseDataOffset = 0;

@@ -712,7 +703,7 @@ public final class BiffViewer {
return ib;
}

private static void writeHex(char buf[], int startInBuf, int value, int nDigits) throws IOException {
private static void writeHex(char buf[], int startInBuf, int value, int nDigits) {
int acc = value;
for(int i=nDigits-1; i>=0; i--) {
int digit = acc & 0x0F;

+11 -26  src/java/org/apache/poi/hssf/dev/EFBiffViewer.java

@@ -22,10 +22,8 @@ import java.io.IOException;
import java.io.InputStream;

import org.apache.poi.hssf.eventusermodel.HSSFEventFactory;
import org.apache.poi.hssf.eventusermodel.HSSFListener;
import org.apache.poi.hssf.eventusermodel.HSSFRequest;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
*
@@ -38,32 +36,19 @@ public class EFBiffViewer

/** Creates a new instance of EFBiffViewer */

public EFBiffViewer()
{
@SuppressWarnings("WeakerAccess")
public EFBiffViewer() {
}

public void run() throws IOException {
NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(file), true);
try {
InputStream din = BiffViewer.getPOIFSInputStream(fs);
try {
HSSFRequest req = new HSSFRequest();
req.addListenerForAllRecords(new HSSFListener()
{
public void processRecord(Record rec)
{
System.out.println(rec);
}
});
HSSFEventFactory factory = new HSSFEventFactory();
factory.processEvents(req, din);
} finally {
din.close();
}
} finally {
fs.close();
try (POIFSFileSystem fs = new POIFSFileSystem(new File(file), true);
InputStream din = BiffViewer.getPOIFSInputStream(fs)) {
HSSFRequest req = new HSSFRequest();

req.addListenerForAllRecords(System.out::println);
HSSFEventFactory factory = new HSSFEventFactory();

factory.processEvents(req, din);
}
}


+2 -3  src/java/org/apache/poi/hssf/dev/FormulaViewer.java

@@ -26,8 +26,7 @@ import org.apache.poi.hssf.model.HSSFFormulaParser;
import org.apache.poi.hssf.record.FormulaRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordFactory;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.formula.ptg.ExpPtg;
import org.apache.poi.ss.formula.ptg.FuncPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
@@ -56,7 +55,7 @@ public class FormulaViewer
* @throws IOException if the file contained errors
*/
public void run() throws IOException {
NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(file), true);
POIFSFileSystem fs = new POIFSFileSystem(new File(file), true);
try {
InputStream is = BiffViewer.getPOIFSInputStream(fs);
try {

+36 -45  src/java/org/apache/poi/hssf/dev/RecordLister.java

@@ -25,7 +25,7 @@ import org.apache.poi.hssf.record.ContinueRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordFactory;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
* This is a low-level debugging class, which simply prints
@@ -47,45 +47,36 @@ public class RecordLister
{
}

public void run()
throws IOException
{
NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(file), true);
try {
InputStream din = BiffViewer.getPOIFSInputStream(fs);
try {
RecordInputStream rinp = new RecordInputStream(din);
while(rinp.hasNextRecord()) {
int sid = rinp.getNextSid();
rinp.nextRecord();
int size = rinp.available();
Class<? extends Record> clz = RecordFactory.getRecordClass(sid);
System.out.print(
formatSID(sid) +
" - " +
formatSize(size) +
" bytes"
);
if(clz != null) {
System.out.print(" \t");
System.out.print(clz.getName().replace("org.apache.poi.hssf.record.", ""));
}
System.out.println();
byte[] data = rinp.readRemainder();
if(data.length > 0) {
System.out.print(" ");
System.out.println( formatData(data) );
}
public void run() throws IOException {
try (POIFSFileSystem fs = new POIFSFileSystem(new File(file), true);
InputStream din = BiffViewer.getPOIFSInputStream(fs)) {
RecordInputStream rinp = new RecordInputStream(din);

while (rinp.hasNextRecord()) {
int sid = rinp.getNextSid();
rinp.nextRecord();

int size = rinp.available();
Class<? extends Record> clz = RecordFactory.getRecordClass(sid);

System.out.print(
formatSID(sid) +
" - " +
formatSize(size) +
" bytes"
);
if (clz != null) {
System.out.print(" \t");
System.out.print(clz.getName().replace("org.apache.poi.hssf.record.", ""));
}
System.out.println();

byte[] data = rinp.readRemainder();
if (data.length > 0) {
System.out.print(" ");
System.out.println(formatData(data));
}
} finally {
din.close();
}
} finally {
fs.close();
}
}
@@ -93,7 +84,7 @@ public class RecordLister
String hex = Integer.toHexString(sid);
String dec = Integer.toString(sid);
StringBuffer s = new StringBuffer();
StringBuilder s = new StringBuilder();
s.append("0x");
for(int i=hex.length(); i<4; i++) {
s.append('0');
@@ -113,7 +104,7 @@ public class RecordLister
String hex = Integer.toHexString(size);
String dec = Integer.toString(size);
StringBuffer s = new StringBuffer();
StringBuilder s = new StringBuilder();
for(int i=hex.length(); i<3; i++) {
s.append('0');
}
@@ -133,7 +124,7 @@ public class RecordLister
return "";
// If possible, do first 4 and last 4 bytes
StringBuffer s = new StringBuffer();
StringBuilder s = new StringBuilder();
if(data.length > 9) {
s.append(byteToHex(data[0]));
s.append(' ');
@@ -155,10 +146,10 @@ public class RecordLister
s.append(' ');
s.append(byteToHex(data[data.length-1]));
} else {
for(int i=0; i<data.length; i++) {
s.append(byteToHex(data[i]));
s.append(' ');
}
for (byte aData : data) {
s.append(byteToHex(aData));
s.append(' ');
}
}
return s.toString();

+5 -5  src/java/org/apache/poi/hssf/extractor/OldExcelExtractor.java

@@ -74,9 +74,9 @@ public class OldExcelExtractor implements Closeable {
}

public OldExcelExtractor(File f) throws IOException {
NPOIFSFileSystem poifs = null;
POIFSFileSystem poifs = null;
try {
poifs = new NPOIFSFileSystem(f);
poifs = new POIFSFileSystem(f);
open(poifs);
toClose = poifs;
return;
@@ -100,7 +100,7 @@ public class OldExcelExtractor implements Closeable {
}
}

public OldExcelExtractor(NPOIFSFileSystem fs) throws IOException {
public OldExcelExtractor(POIFSFileSystem fs) throws IOException {
open(fs);
}

@@ -114,7 +114,7 @@ public class OldExcelExtractor implements Closeable {
: new BufferedInputStream(biffStream, 8);

if (FileMagic.valueOf(bis) == FileMagic.OLE2) {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(bis);
POIFSFileSystem poifs = new POIFSFileSystem(bis);
try {
open(poifs);
toClose = poifs; // Fixed by GR, we should not close it here
@@ -130,7 +130,7 @@ public class OldExcelExtractor implements Closeable {
}
}

private void open(NPOIFSFileSystem fs) throws IOException {
private void open(POIFSFileSystem fs) throws IOException {
open(fs.getRoot());
}


+18 -33  src/java/org/apache/poi/hssf/usermodel/HSSFWorkbook.java

@@ -94,10 +94,9 @@ import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.FilteringDirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSDocument;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.Ole10Native;
import org.apache.poi.poifs.filesystem.POIFSDocument;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.Ole10Native;
import org.apache.poi.ss.SpreadsheetVersion;
import org.apache.poi.ss.formula.FormulaShifter;
import org.apache.poi.ss.formula.FormulaType;
@@ -129,6 +128,7 @@ import org.apache.poi.util.POILogger;
* @see org.apache.poi.hssf.model.InternalWorkbook
* @see org.apache.poi.hssf.usermodel.HSSFSheet
*/
@SuppressWarnings("WeakerAccess")
public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss.usermodel.Workbook {

//arbitrarily selected; may need to increase
@@ -240,19 +240,6 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
public HSSFWorkbook(POIFSFileSystem fs) throws IOException {
this(fs,true);
}
/**
* Given a POI POIFSFileSystem object, read in its Workbook along
* with all related nodes, and populate the high and low level models.
* <p>This calls {@link #HSSFWorkbook(POIFSFileSystem, boolean)} with
* preserve nodes set to true.
*
* @see #HSSFWorkbook(POIFSFileSystem, boolean)
* @see org.apache.poi.poifs.filesystem.POIFSFileSystem
* @exception IOException if the stream cannot be read
*/
public HSSFWorkbook(NPOIFSFileSystem fs) throws IOException {
this(fs.getRoot(),true);
}

/**
* Given a POI POIFSFileSystem object, read in its Workbook and populate
@@ -411,7 +398,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
public HSSFWorkbook(InputStream s, boolean preserveNodes)
throws IOException
{
this(new NPOIFSFileSystem(s).getRoot(), preserveNodes);
this(new POIFSFileSystem(s).getRoot(), preserveNodes);
}

/**
@@ -1154,11 +1141,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss

HSSFName getBuiltInName(byte builtinCode, int sheetIndex) {
int index = findExistingBuiltinNameRecordIdx(sheetIndex, builtinCode);
if (index < 0) {
return null;
} else {
return names.get(index);
}
return (index < 0) ? null : names.get(index);
}


@@ -1244,7 +1227,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
// So we don't confuse users, give them back
// the same object every time, but create
// them lazily
Integer sIdx = Integer.valueOf(idx);
Integer sIdx = idx;
if(fonts.containsKey(sIdx)) {
return fonts.get(sIdx);
}
@@ -1262,7 +1245,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
* Should only be called after deleting fonts,
* and that's not something you should normally do
*/
protected void resetFontCache() {
void resetFontCache() {
fonts = new HashMap<>();
}

@@ -1308,7 +1291,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
}

/**
* Closes the underlying {@link NPOIFSFileSystem} from which
* Closes the underlying {@link POIFSFileSystem} from which
* the Workbook was read, if any.
*
* <p>Once this has been called, no further
@@ -1338,7 +1321,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
// Update the Workbook stream in the file
DocumentNode workbookNode = (DocumentNode)dir.getEntry(
getWorkbookDirEntryName(dir));
NPOIFSDocument workbookDoc = new NPOIFSDocument(workbookNode);
POIFSDocument workbookDoc = new POIFSDocument(workbookNode);
workbookDoc.replaceContents(new ByteArrayInputStream(getBytes()));
// Update the properties streams in the file
@@ -1388,14 +1371,14 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
*/
@Override
public void write(OutputStream stream) throws IOException {
try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
try (POIFSFileSystem fs = new POIFSFileSystem()) {
write(fs);
fs.writeFilesystem(stream);
}
}
/** Writes the workbook out to a brand new, empty POIFS */
private void write(NPOIFSFileSystem fs) throws IOException {
private void write(POIFSFileSystem fs) throws IOException {
// For tracking what we've written out, used if we're
// going to be preserving nodes
List<String> excepts = new ArrayList<>(1);
@@ -1525,7 +1508,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
}

@SuppressWarnings("resource")
protected void encryptBytes(byte buf[]) {
void encryptBytes(byte buf[]) {
EncryptionInfo ei = getEncryptionInfo();
if (ei == null) {
return;
@@ -1540,7 +1523,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
ChunkedCipherOutputStream os = enc.getDataStream(leos, initialOffset);
int totalBytes = 0;
while (totalBytes < buf.length) {
plain.read(tmp, 0, 4);
IOUtils.readFully(plain, tmp, 0, 4);
final int sid = LittleEndian.getUShort(tmp, 0);
final int len = LittleEndian.getUShort(tmp, 2);
boolean isPlain = Biff8DecryptingStream.isNeverEncryptedRecord(sid);
@@ -1836,9 +1819,11 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
/**
* Spits out a list of all the drawing records in the workbook.
*/
public void dumpDrawingGroupRecords(boolean fat)
{
public void dumpDrawingGroupRecords(boolean fat) {
DrawingGroupRecord r = (DrawingGroupRecord) workbook.findFirstRecordBySid( DrawingGroupRecord.sid );
if (r == null) {
return;
}
r.decode();
List<EscherRecord> escherRecords = r.getEscherRecords();
PrintWriter w = new PrintWriter(new OutputStreamWriter(System.out, Charset.defaultCharset()));
@@ -2007,7 +1992,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss

}

protected static Map<String,ClassID> getOleMap() {
static Map<String,ClassID> getOleMap() {
Map<String,ClassID> olemap = new HashMap<>();
olemap.put("PowerPoint Document", ClassIDPredefined.POWERPOINT_V8.getClassID());
for (String str : WORKBOOK_DIR_ENTRY_NAMES) {

+3 -2  src/java/org/apache/poi/hssf/usermodel/HSSFWorkbookFactory.java

@@ -20,7 +20,7 @@ package org.apache.poi.hssf.usermodel;
import java.io.IOException;

import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.apache.poi.util.Internal;

@@ -28,6 +28,7 @@ import org.apache.poi.util.Internal;
* Helper class which is instantiated by reflection from
* {@link WorkbookFactory#create(java.io.File)} and similar
*/
@SuppressWarnings("unused")
@Internal
public class HSSFWorkbookFactory extends WorkbookFactory {
/**
@@ -35,7 +36,7 @@ public class HSSFWorkbookFactory extends WorkbookFactory {
* Note that in order to properly release resources the
* Workbook should be closed after use.
*/
public static HSSFWorkbook createWorkbook(final NPOIFSFileSystem fs) throws IOException {
public static HSSFWorkbook createWorkbook(final POIFSFileSystem fs) throws IOException {
return new HSSFWorkbook(fs);
}


+11 -16  src/java/org/apache/poi/poifs/crypt/ChunkedCipherOutputStream.java

@@ -285,25 +285,20 @@ public abstract class ChunkedCipherOutputStream extends FilterOutputStream {
@Override
public void processPOIFSWriterEvent(POIFSWriterEvent event) {
try {
OutputStream os = event.getStream();

// StreamSize (8 bytes): An unsigned integer that specifies the number of bytes used by data
// encrypted within the EncryptedData field, not including the size of the StreamSize field.
// Note that the actual size of the \EncryptedPackage stream (1) can be larger than this
// value, depending on the block size of the chosen encryption algorithm
byte buf[] = new byte[LittleEndianConsts.LONG_SIZE];
LittleEndian.putLong(buf, 0, pos);
os.write(buf);

FileInputStream fis = new FileInputStream(fileOut);
try {
try (OutputStream os = event.getStream();
FileInputStream fis = new FileInputStream(fileOut)) {

// StreamSize (8 bytes): An unsigned integer that specifies the number of bytes used by data
// encrypted within the EncryptedData field, not including the size of the StreamSize field.
// Note that the actual size of the \EncryptedPackage stream (1) can be larger than this
// value, depending on the block size of the chosen encryption algorithm
byte buf[] = new byte[LittleEndianConsts.LONG_SIZE];
LittleEndian.putLong(buf, 0, pos);
os.write(buf);

IOUtils.copy(fis, os);
} finally {
fis.close();
}

os.close();

if (!fileOut.delete()) {
LOG.log(POILogger.ERROR, "Can't delete temporary encryption file: "+fileOut);
}

+1 -6  src/java/org/apache/poi/poifs/crypt/Decryptor.java

@@ -26,7 +26,6 @@ import javax.crypto.spec.SecretKeySpec;

import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public abstract class Decryptor implements Cloneable {
@@ -121,14 +120,10 @@ public abstract class Decryptor implements Cloneable {
return d;
}

public InputStream getDataStream(NPOIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}

public InputStream getDataStream(POIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}

// for tests
public byte[] getVerifier() {
return verifier;
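With the NPOIFSFileSystem overload of getDataStream removed, decryption callers pass the renamed filesystem type directly. A hedged sketch of that path; Decryptor.getInstance and verifyPassword are assumed from the existing POI crypt API and are not part of this diff, and the file name and password argument are illustrative:

import java.io.File;
import java.io.InputStream;

import org.apache.poi.poifs.crypt.Decryptor;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class DecryptSketch {
    public static void main(String[] args) throws Exception {
        try (POIFSFileSystem fs = new POIFSFileSystem(new File(args[0]), true)) {
            EncryptionInfo info = new EncryptionInfo(fs);   // POIFSFileSystem overload is kept
            Decryptor d = Decryptor.getInstance(info);      // assumed API, unchanged by this commit
            if (!d.verifyPassword(args[1])) {
                throw new IllegalStateException("wrong password");
            }
            try (InputStream is = d.getDataStream(fs)) {    // formerly also accepted NPOIFSFileSystem
                System.out.println("Decrypted package stream opened, " + is.available() + " bytes available");
            }
        }
    }
}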

+1 -9  src/java/org/apache/poi/poifs/crypt/EncryptionInfo.java

@@ -26,7 +26,6 @@ import java.io.IOException;

import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
@@ -82,13 +81,6 @@ public class EncryptionInfo implements Cloneable {
this(fs.getRoot());
}
/**
* Opens for decryption
*/
public EncryptionInfo(NPOIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
/**
* Opens for decryption
*/
@@ -209,7 +201,7 @@ public class EncryptionInfo implements Cloneable {
* @throws IllegalAccessException if the builder class can't be loaded
* @throws InstantiationException if the builder class can't be loaded
*/
@SuppressWarnings("WeakerAccess")
@SuppressWarnings({"WeakerAccess", "JavadocReference"})
protected static EncryptionInfoBuilder getBuilder(EncryptionMode encryptionMode)
throws ClassNotFoundException, IllegalAccessException, InstantiationException {
ClassLoader cl = EncryptionInfo.class.getClassLoader();

+0 -4  src/java/org/apache/poi/poifs/crypt/Encryptor.java

@@ -25,7 +25,6 @@ import javax.crypto.spec.SecretKeySpec;

import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public abstract class Encryptor implements Cloneable {
@@ -51,9 +50,6 @@ public abstract class Encryptor implements Cloneable {
return info.getEncryptor();
}

public OutputStream getDataStream(NPOIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}
public OutputStream getDataStream(POIFSFileSystem fs) throws IOException, GeneralSecurityException {
return getDataStream(fs.getRoot());
}

+10 -33  src/java/org/apache/poi/poifs/crypt/cryptoapi/CryptoAPIEncryptor.java

@@ -34,26 +34,23 @@ import javax.crypto.SecretKey;
import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.crypt.ChunkedCipherOutputStream;
import org.apache.poi.poifs.crypt.CryptoFunctions;
import org.apache.poi.poifs.crypt.DataSpaceMapUtils;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.crypt.Encryptor;
import org.apache.poi.poifs.crypt.HashAlgorithm;
import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIDecryptor.StreamDescriptorEntry;
import org.apache.poi.poifs.crypt.standard.EncryptionRecord;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.StringUtil;

public class CryptoAPIEncryptor extends Encryptor implements Cloneable {
private int chunkSize = 512;
protected CryptoAPIEncryptor() {
CryptoAPIEncryptor() {
}

@Override
@@ -96,7 +93,7 @@ public class CryptoAPIEncryptor extends Encryptor implements Cloneable {
* @param cipher may be null, otherwise the given instance is reset to the new block index
* @param block the block index, e.g. the persist/slide id (hslf)
* @return a new cipher object, if cipher was null, otherwise the reinitialized cipher
* @throws GeneralSecurityException
* @throws GeneralSecurityException when the cipher can't be initialized
*/
public Cipher initCipherForBlock(Cipher cipher, int block)
throws GeneralSecurityException {
@@ -104,8 +101,7 @@ public class CryptoAPIEncryptor extends Encryptor implements Cloneable {
}

@Override
public ChunkedCipherOutputStream getDataStream(DirectoryNode dir)
throws IOException, GeneralSecurityException {
public ChunkedCipherOutputStream getDataStream(DirectoryNode dir) throws IOException {
throw new IOException("not supported");
}
@@ -122,7 +118,7 @@ public class CryptoAPIEncryptor extends Encryptor implements Cloneable {
*
* @see <a href="http://msdn.microsoft.com/en-us/library/dd943321(v=office.12).aspx">2.3.5.4 RC4 CryptoAPI Encrypted Summary Stream</a>
*/
public void setSummaryEntries(DirectoryNode dir, String encryptedStream, NPOIFSFileSystem entries)
public void setSummaryEntries(DirectoryNode dir, String encryptedStream, POIFSFileSystem entries)
throws IOException, GeneralSecurityException {
CryptoAPIDocumentOutputStream bos = new CryptoAPIDocumentOutputStream(this); // NOSONAR
byte buf[] = new byte[8];
@@ -191,33 +187,15 @@ public class CryptoAPIEncryptor extends Encryptor implements Cloneable {
dir.createDocument(encryptedStream, new ByteArrayInputStream(bos.getBuf(), 0, savedSize));
}

protected int getKeySizeInBytes() {
return getEncryptionInfo().getHeader().getKeySize() / 8;
}
// protected int getKeySizeInBytes() {
// return getEncryptionInfo().getHeader().getKeySize() / 8;
// }

@Override
public void setChunkSize(int chunkSize) {
this.chunkSize = chunkSize;
}
protected void createEncryptionInfoEntry(DirectoryNode dir) throws IOException {
DataSpaceMapUtils.addDefaultDataSpace(dir);
final EncryptionInfo info = getEncryptionInfo();
final CryptoAPIEncryptionHeader header = (CryptoAPIEncryptionHeader)getEncryptionInfo().getHeader();
final CryptoAPIEncryptionVerifier verifier = (CryptoAPIEncryptionVerifier)getEncryptionInfo().getVerifier();
EncryptionRecord er = new EncryptionRecord() {
@Override
public void write(LittleEndianByteArrayOutputStream bos) {
bos.writeShort(info.getVersionMajor());
bos.writeShort(info.getVersionMinor());
header.write(bos);
verifier.write(bos);
}
};
DataSpaceMapUtils.createEncryptionEntry(dir, "EncryptionInfo", er);
}


@Override
public CryptoAPIEncryptor clone() throws CloneNotSupportedException {
return (CryptoAPIEncryptor)super.clone();
@@ -239,12 +217,11 @@ public class CryptoAPIEncryptor extends Encryptor implements Cloneable {
}

@Override
protected void createEncryptionInfoEntry(DirectoryNode dir, File tmpFile)
throws IOException, GeneralSecurityException {
protected void createEncryptionInfoEntry(DirectoryNode dir, File tmpFile) {
throw new EncryptedDocumentException("createEncryptionInfoEntry not supported");
}

public CryptoAPICipherOutputStream(OutputStream stream)
CryptoAPICipherOutputStream(OutputStream stream)
throws IOException, GeneralSecurityException {
super(stream, CryptoAPIEncryptor.this.chunkSize);
}

+3 -6  src/java/org/apache/poi/poifs/crypt/xor/XOREncryptor.java

@@ -26,6 +26,7 @@ import java.util.BitSet;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;

import org.apache.poi.EncryptedDocumentException;
import org.apache.poi.poifs.crypt.ChunkedCipherOutputStream;
import org.apache.poi.poifs.crypt.CryptoFunctions;
import org.apache.poi.poifs.crypt.Encryptor;
@@ -79,9 +80,6 @@ public class XOREncryptor extends Encryptor implements Cloneable {
// chunkSize is irrelevant
}

protected void createEncryptionInfoEntry(DirectoryNode dir) throws IOException {
}

@Override
public XOREncryptor clone() throws CloneNotSupportedException {
return (XOREncryptor)super.clone();
@@ -110,9 +108,8 @@ public class XOREncryptor extends Encryptor implements Cloneable {
}

@Override
protected void createEncryptionInfoEntry(DirectoryNode dir, File tmpFile)
throws IOException, GeneralSecurityException {
XOREncryptor.this.createEncryptionInfoEntry(dir);
protected void createEncryptionInfoEntry(DirectoryNode dir, File tmpFile) {
throw new EncryptedDocumentException("createEncryptionInfoEntry not supported");
}

@Override

+14 -27  src/java/org/apache/poi/poifs/dev/POIFSDump.java

@@ -29,20 +29,21 @@ import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.NPOIFSStream;
import org.apache.poi.poifs.property.NPropertyTable;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSStream;
import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.util.IOUtils;

/**
* Dump internal structure of a OLE2 file into file system
*/
public class POIFSDump {

public final class POIFSDump {
//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;

private POIFSDump() {}

public static void main(String[] args) throws IOException {
if (args.length == 0) {
System.err.println("Must specify at least one file to dump");
@@ -66,14 +67,8 @@ public class POIFSDump {
}

System.out.println("Dumping " + filename);
FileInputStream is = new FileInputStream(filename);
NPOIFSFileSystem fs;
try {
fs = new NPOIFSFileSystem(is);
} finally {
is.close();
}
try {
try (FileInputStream is = new FileInputStream(filename);
POIFSFileSystem fs = new POIFSFileSystem(is)) {
DirectoryEntry root = fs.getRoot();
String filenameWithoutPath = new File(filename).getName();
File dumpDir = new File(filenameWithoutPath + "_dump");
@@ -89,7 +84,7 @@ public class POIFSDump {
dump(fs, header.getPropertyStart(), "properties", file);
}
if (dumpMini) {
NPropertyTable props = fs.getPropertyTable();
PropertyTable props = fs.getPropertyTable();
int startBlock = props.getRoot().getStartBlock();
if (startBlock == POIFSConstants.END_OF_CHAIN) {
System.err.println("No Mini Stream in file");
@@ -97,8 +92,6 @@ public class POIFSDump {
dump(fs, startBlock, "mini-stream", file);
}
}
} finally {
fs.close();
}
}
}
@@ -112,11 +105,8 @@ public class POIFSDump {
byte[] bytes = IOUtils.toByteArray(is);
is.close();

OutputStream out = new FileOutputStream(new File(parent, node.getName().trim()));
try {
out.write(bytes);
} finally {
out.close();
try (OutputStream out = new FileOutputStream(new File(parent, node.getName().trim()))) {
out.write(bytes);
}
} else if (entry instanceof DirectoryEntry){
DirectoryEntry dir = (DirectoryEntry)entry;
@@ -130,11 +120,10 @@ public class POIFSDump {
}
}
}
public static void dump(NPOIFSFileSystem fs, int startBlock, String name, File parent) throws IOException {
public static void dump(POIFSFileSystem fs, int startBlock, String name, File parent) throws IOException {
File file = new File(parent, name);
FileOutputStream out = new FileOutputStream(file);
try {
NPOIFSStream stream = new NPOIFSStream(fs, startBlock);
try (FileOutputStream out = new FileOutputStream(file)) {
POIFSStream stream = new POIFSStream(fs, startBlock);

byte[] b = IOUtils.safelyAllocate(fs.getBigBlockSize(), MAX_RECORD_LENGTH);
for (ByteBuffer bb : stream) {
@@ -142,8 +131,6 @@ public class POIFSDump {
bb.get(b);
out.write(b, 0, len);
}
} finally {
out.close();
}
}
}

+7 -8  src/java/org/apache/poi/poifs/dev/POIFSLister.java

@@ -25,7 +25,6 @@ import java.util.Iterator;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
@@ -47,23 +46,23 @@ public class POIFSLister {

boolean withSizes = false;
boolean newPOIFS = true;
for (int j = 0; j < args.length; j++) {
if (args[j].equalsIgnoreCase("-size") || args[j].equalsIgnoreCase("-sizes")) {
for (String arg : args) {
if (arg.equalsIgnoreCase("-size") || arg.equalsIgnoreCase("-sizes")) {
withSizes = true;
} else if (args[j].equalsIgnoreCase("-old") || args[j].equalsIgnoreCase("-old-poifs")) {
} else if (arg.equalsIgnoreCase("-old") || arg.equalsIgnoreCase("-old-poifs")) {
newPOIFS = false;
} else {
if(newPOIFS) {
viewFile(args[j], withSizes);
if (newPOIFS) {
viewFile(arg, withSizes);
} else {
viewFileOld(args[j], withSizes);
viewFileOld(arg, withSizes);
}
}
}
}

public static void viewFile(final String filename, boolean withSizes) throws IOException {
NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(filename));
POIFSFileSystem fs = new POIFSFileSystem(new File(filename));
displayDirectory(fs.getRoot(), "", withSizes);
fs.close();
}

+8 -9  src/java/org/apache/poi/poifs/dev/POIFSViewer.java

@@ -23,7 +23,7 @@ import java.io.File;
import java.io.IOException;
import java.util.List;

import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
* A simple viewer for POIFS files
@@ -31,8 +31,9 @@ import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
* @author Marc Johnson (mjohnson at apache dot org)
*/

public class POIFSViewer
{
public final class POIFSViewer {

private POIFSViewer() {}

/**
* Display the contents of multiple POIFS files
@@ -47,9 +48,8 @@ public class POIFSViewer
}
boolean printNames = (args.length > 1);

for (int j = 0; j < args.length; j++)
{
viewFile(args[ j ], printNames);
for (String arg : args) {
viewFile(arg, printNames);
}
}

@@ -67,7 +67,7 @@ public class POIFSViewer
System.out.println(flowerbox);
}
try {
NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(filename));
POIFSFileSystem fs = new POIFSFileSystem(new File(filename));
List<String> strings = POIFSViewEngine.inspectViewable(fs, true, 0, " ");
for (String s : strings) {
System.out.print(s);
@@ -77,5 +77,4 @@ public class POIFSViewer
System.out.println(e.getMessage());
}
}
} // end public class POIFSViewer

}

+10 -10  src/java/org/apache/poi/poifs/eventfilesystem/POIFSReader.java

@@ -24,12 +24,12 @@ import java.io.IOException;
import java.io.InputStream;

import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.NPOIFSDocument;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSDocument;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSDocumentPath;
import org.apache.poi.poifs.property.DirectoryProperty;
import org.apache.poi.poifs.property.DocumentProperty;
import org.apache.poi.poifs.property.NPropertyTable;
import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.poifs.property.Property;
import org.apache.poi.poifs.property.RootProperty;
import org.apache.poi.util.IOUtils;
@@ -59,7 +59,7 @@ public class POIFSReader
*/

public void read(final InputStream stream) throws IOException {
try (NPOIFSFileSystem poifs = new NPOIFSFileSystem(stream)) {
try (POIFSFileSystem poifs = new POIFSFileSystem(stream)) {
read(poifs);
}
}
@@ -72,7 +72,7 @@ public class POIFSReader
* @exception IOException on errors reading, or on invalid data
*/
public void read(final File poifsFile) throws IOException {
try (NPOIFSFileSystem poifs = new NPOIFSFileSystem(poifsFile, true)) {
try (POIFSFileSystem poifs = new POIFSFileSystem(poifsFile, true)) {
read(poifs);
}
}
@@ -84,11 +84,11 @@ public class POIFSReader
*
* @exception IOException on errors reading, or on invalid data
*/
public void read(final NPOIFSFileSystem poifs) throws IOException {
public void read(final POIFSFileSystem poifs) throws IOException {
registryClosed = true;

// get property table from the document
NPropertyTable properties = poifs.getPropertyTable();
PropertyTable properties = poifs.getPropertyTable();

// process documents
RootProperty root = properties.getRoot();
@@ -212,7 +212,7 @@ public class POIFSReader
}
}

private void processProperties(final NPOIFSFileSystem poifs, DirectoryProperty dir, final POIFSDocumentPath path) {
private void processProperties(final POIFSFileSystem poifs, DirectoryProperty dir, final POIFSDocumentPath path) {
boolean hasChildren = false;
for (final Property property : dir) {
hasChildren = true;
@@ -222,10 +222,10 @@ public class POIFSReader
POIFSDocumentPath new_path = new POIFSDocumentPath(path,new String[]{name});
processProperties(poifs, (DirectoryProperty) property, new_path);
} else {
NPOIFSDocument document = null;
POIFSDocument document = null;
for (POIFSReaderListener rl : registry.getListeners(path, name)) {
if (document == null) {
document = new NPOIFSDocument((DocumentProperty)property, poifs);
document = new POIFSDocument((DocumentProperty)property, poifs);
}
try (DocumentInputStream dis = new DocumentInputStream(document)) {
POIFSReaderEvent pe = new POIFSReaderEvent(dis, path, name);
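
POIFSReaderListener is a single-method callback, so a listener can be registered as a lambda; a minimal sketch, assuming a placeholder file name and the standard "\005SummaryInformation" stream name:

    import java.io.File;
    import java.io.IOException;
    import org.apache.poi.poifs.eventfilesystem.POIFSReader;

    public class ReadSummaryStream {
        public static void main(String[] args) throws IOException {
            POIFSReader reader = new POIFSReader();
            // invoked once per matching document, with an open DocumentInputStream
            reader.registerListener(
                event -> System.out.println(event.getPath() + event.getName()
                        + ": " + event.getStream().available() + " bytes"),
                "\005SummaryInformation");
            reader.read(new File("sample.doc"));
        }
    }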

+ 9
- 9
src/java/org/apache/poi/poifs/filesystem/DirectoryNode.java View File

@@ -50,7 +50,7 @@ public class DirectoryNode
private final ArrayList<Entry> _entries = new ArrayList<>();

// the NPOIFSFileSytem we belong to
private final NPOIFSFileSystem _nfilesystem;
private final POIFSFileSystem _nfilesystem;

// the path described by this document
private final POIFSDocumentPath _path;
@@ -64,7 +64,7 @@ public class DirectoryNode
* @param parent the parent of this entry
*/
DirectoryNode(final DirectoryProperty property,
final NPOIFSFileSystem nfilesystem,
final POIFSFileSystem nfilesystem,
final DirectoryNode parent)
{
super(property, parent);
@@ -114,7 +114,7 @@ public class DirectoryNode
/**
* @return the filesystem that this belongs to
*/
public NPOIFSFileSystem getFileSystem()
public POIFSFileSystem getFileSystem()
{
return _nfilesystem;
}
@@ -125,7 +125,7 @@ public class DirectoryNode
* that this belongs to, otherwise Null if OPOIFS based
* @return the filesystem that this belongs to
*/
public NPOIFSFileSystem getNFileSystem()
public POIFSFileSystem getNFileSystem()
{
return _nfilesystem;
}
@@ -152,7 +152,7 @@ public class DirectoryNode
*
* @param document the document to be opened
*
* @return a newly opened DocumentInputStream or NDocumentInputStream
* @return a newly opened DocumentInputStream or DocumentInputStream
*
* @exception IOException if the document does not exist or the
* name is that of a DirectoryEntry
@@ -179,7 +179,7 @@ public class DirectoryNode
*
* @exception IOException if the document can't be created
*/
DocumentEntry createDocument(final NPOIFSDocument document)
DocumentEntry createDocument(final POIFSDocument document)
throws IOException
{
DocumentProperty property = document.getDocumentProperty();
@@ -351,7 +351,7 @@ public class DirectoryNode
final InputStream stream)
throws IOException
{
return createDocument(new NPOIFSDocument(name, _nfilesystem, stream));
return createDocument(new POIFSDocument(name, _nfilesystem, stream));
}

/**
@@ -370,7 +370,7 @@ public class DirectoryNode
final POIFSWriterListener writer)
throws IOException
{
return createDocument(new NPOIFSDocument(name, size, _nfilesystem, writer));
return createDocument(new POIFSDocument(name, size, _nfilesystem, writer));
}

/**
@@ -417,7 +417,7 @@ public class DirectoryNode
return createDocument(name, stream);
} else {
DocumentNode existing = (DocumentNode)getEntry(name);
NPOIFSDocument nDoc = new NPOIFSDocument(existing);
POIFSDocument nDoc = new POIFSDocument(existing);
nDoc.replaceContents(stream);
return existing;
}
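
A sketch of the create-or-replace path shown above: open an existing container read-write, rewrite one stream, and write the file back in place (file and stream names are placeholders):

    import java.io.ByteArrayInputStream;
    import java.io.File;
    import java.io.IOException;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class ReplaceStreamContents {
        public static void main(String[] args) throws IOException {
            // readOnly = false, so writeFilesystem() may update the file in place
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("container.ole2"), false)) {
                byte[] payload = {0x01, 0x02, 0x03};
                // creates "MyStream" if absent, otherwise replaces its contents
                fs.getRoot().createOrUpdateDocument("MyStream", new ByteArrayInputStream(payload));
                fs.writeFilesystem();
            }
        }
    }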

+ 5
- 2
src/java/org/apache/poi/poifs/filesystem/DocumentFactoryHelper.java View File

@@ -33,7 +33,10 @@ import org.apache.poi.util.Removal;
* SlideShowFactory to combine common code here.
*/
@Internal
public class DocumentFactoryHelper {
public final class DocumentFactoryHelper {
private DocumentFactoryHelper() {
}

/**
* Wrap the OLE2 data in the NPOIFSFileSystem into a decrypted stream by using
* the given password.
@@ -43,7 +46,7 @@ public class DocumentFactoryHelper {
* @return A stream for reading the decrypted data
* @throws IOException If an error occurs while decrypting or if the password does not match
*/
public static InputStream getDecryptedStream(final NPOIFSFileSystem fs, String password)
public static InputStream getDecryptedStream(final POIFSFileSystem fs, String password)
throws IOException {
// wrap the stream in a FilterInputStream to close the NPOIFSFileSystem
// as well when the resulting OPCPackage is closed
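
DocumentFactoryHelper is marked @Internal, but the signature above can be exercised directly; a sketch, assuming a password-protected OOXML document stored in the OLE2 container (file name and password are placeholders):

    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.poi.poifs.filesystem.DocumentFactoryHelper;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class DecryptOoxml {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("protected.xlsx"));
                 InputStream plain = DocumentFactoryHelper.getDecryptedStream(fs, "secret")) {
                // "plain" now yields the decrypted OOXML (ZIP) bytes
                System.out.println("first byte: " + plain.read());
            }
        }
    }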

+ 288
- 97
src/java/org/apache/poi/poifs/filesystem/DocumentInputStream.java View File

@@ -17,120 +17,261 @@

package org.apache.poi.poifs.filesystem;

import static org.apache.poi.util.LittleEndianConsts.INT_SIZE;
import static org.apache.poi.util.LittleEndianConsts.LONG_SIZE;
import static org.apache.poi.util.LittleEndianConsts.SHORT_SIZE;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Iterator;

import org.apache.poi.poifs.property.DocumentProperty;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.SuppressForbidden;

/**
* This class provides methods to read a DocumentEntry managed by a
* {@link POIFSFileSystem} or {@link NPOIFSFileSystem} instance.
* It creates the appropriate one, and delegates, allowing us to
* work transparently with the two.
* {@link POIFSFileSystem} instance.
*/
public class DocumentInputStream extends InputStream implements LittleEndianInput {
/** returned by read operations if we're at end of document */
protected static final int EOF = -1;

private DocumentInputStream delegate;
/** For use by downstream implementations */
protected DocumentInputStream() {}

/**
* Create an InputStream from the specified DocumentEntry
*
* @param document the DocumentEntry to be read
*
* @exception IOException if the DocumentEntry cannot be opened (like, maybe it has
* been deleted?)
*/
public DocumentInputStream(DocumentEntry document) throws IOException {
if (!(document instanceof DocumentNode)) {
throw new IOException("Cannot open internal document storage");
}
delegate = new NDocumentInputStream(document);
}
public final class DocumentInputStream extends InputStream implements LittleEndianInput {
/** returned by read operations if we're at end of document */
private static final int EOF = -1;

/**
* Create an InputStream from the specified Document
*
* @param document the Document to be read
*/
public DocumentInputStream(NPOIFSDocument document) {
delegate = new NDocumentInputStream(document);
}
/** current offset into the Document */
private int _current_offset;
/** current block count */
private int _current_block_count;

/** current marked offset into the Document (used by mark and reset) */
private int _marked_offset;
/** and the block count for it */
private int _marked_offset_count;

/** the Document's size */
private final int _document_size;

/** have we been closed? */
private boolean _closed;

/** the actual Document */
private final POIFSDocument _document;

private Iterator<ByteBuffer> _data;
private ByteBuffer _buffer;

/**
* Create an InputStream from the specified DocumentEntry
*
* @param document the DocumentEntry to be read
*
* @exception IOException if the DocumentEntry cannot be opened (like, maybe it has
* been deleted?)
*/
public DocumentInputStream(DocumentEntry document) throws IOException {
if (!(document instanceof DocumentNode)) {
throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
}
_current_offset = 0;
_current_block_count = 0;
_marked_offset = 0;
_marked_offset_count = 0;
_document_size = document.getSize();
_closed = false;

// can't be asserted ... see bug 61300
// assert (_document_size >= 0) : "Document size can't be < 0";

DocumentNode doc = (DocumentNode)document;
DocumentProperty property = (DocumentProperty)doc.getProperty();
_document = new POIFSDocument(
property,
((DirectoryNode)doc.getParent()).getNFileSystem()
);
_data = _document.getBlockIterator();
}

/**
* Create an InputStream from the specified Document
*
* @param document the Document to be read
*/
public DocumentInputStream(POIFSDocument document) {
_current_offset = 0;
_current_block_count = 0;
_marked_offset = 0;
_marked_offset_count = 0;
_document_size = document.getSize();
_closed = false;
_document = document;
_data = _document.getBlockIterator();
}

@Override
@SuppressForbidden("just delegating")
public int available() {
return delegate.available();
}
public int available() {
return remainingBytes();
}

/**
* Helper methods for forbidden api calls
*
* @return the bytes remaining until the end of the stream
*/
private int remainingBytes() {
if (_closed) {
throw new IllegalStateException("cannot perform requested operation on a closed stream");
}
return _document_size - _current_offset;
}

@Override
public void close() {
delegate.close();
}
public void close() {
_closed = true;
}

/**
* Tests if this input stream supports the mark and reset methods.
*
* @return {@code true} always
*/
@Override
public void mark(int ignoredReadlimit) {
delegate.mark(ignoredReadlimit);
}
public boolean markSupported() {
return true;
}

/**
* Tests if this input stream supports the mark and reset methods.
*
* @return <code>true</code> always
*/
@Override
public boolean markSupported() {
return true;
}
public void mark(int ignoredReadlimit) {
_marked_offset = _current_offset;
_marked_offset_count = Math.max(0, _current_block_count - 1);
}

@Override
public int read() throws IOException {
return delegate.read();
}
public int read() throws IOException {
dieIfClosed();
if (atEOD()) {
return EOF;
}
byte[] b = new byte[1];
int result = read(b, 0, 1);
if(result >= 0) {
if(b[0] < 0) {
return b[0]+256;
}
return b[0];
}
return result;
}

@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}

@Override
public int read(byte[] b, int off, int len) throws IOException {
return delegate.read(b, off, len);
}
public int read(byte[] b, int off, int len) throws IOException {
dieIfClosed();
if (b == null) {
throw new IllegalArgumentException("buffer must not be null");
}
if (off < 0 || len < 0 || b.length < off + len) {
throw new IndexOutOfBoundsException("can't read past buffer boundaries");
}
if (len == 0) {
return 0;
}
if (atEOD()) {
return EOF;
}
int limit = Math.min(remainingBytes(), len);
readFully(b, off, limit);
return limit;
}

/**
* Repositions this stream to the position at the time the mark() method was
* last called on this input stream. If mark() has not been called this
* method repositions the stream to its beginning.
*/
/**
* Repositions this stream to the position at the time the mark() method was
* last called on this input stream. If mark() has not been called this
* method repositions the stream to its beginning.
*/
@Override
public void reset() {
delegate.reset();
public void reset() {
// Special case for reset to the start
if(_marked_offset == 0 && _marked_offset_count == 0) {
_current_block_count = _marked_offset_count;
_current_offset = _marked_offset;
_data = _document.getBlockIterator();
_buffer = null;
return;
}

// Start again, then wind on to the required block
_data = _document.getBlockIterator();
_current_offset = 0;
for(int i=0; i<_marked_offset_count; i++) {
_buffer = _data.next();
_current_offset += _buffer.remaining();
}
_current_block_count = _marked_offset_count;
// Do we need to position within it?
if(_current_offset != _marked_offset) {
// Grab the right block
_buffer = _data.next();
_current_block_count++;
// Skip to the right place in it
// (It should be positioned already at the start of the block,
// we need to move further inside the block)
int skipBy = _marked_offset - _current_offset;
_buffer.position(_buffer.position() + skipBy);
}

// All done
_current_offset = _marked_offset;
}

@Override
@Override
public long skip(long n) throws IOException {
return delegate.skip(n);
dieIfClosed();
if (n < 0) {
return 0;
}
long new_offset = _current_offset + n;

if (new_offset < _current_offset) {
// wrap around in converting a VERY large long to an int
new_offset = _document_size;
} else if (new_offset > _document_size) {
new_offset = _document_size;
}
long rval = new_offset - _current_offset;
// TODO Do this better
byte[] skip = IOUtils.safelyAllocate(rval, Integer.MAX_VALUE);
readFully(skip);
return rval;
}

@Override
public byte readByte() {
return delegate.readByte();
private void dieIfClosed() throws IOException {
if (_closed) {
throw new IOException("cannot perform requested operation on a closed stream");
}
}

@Override
public double readDouble() {
return delegate.readDouble();
private boolean atEOD() {
return _current_offset == _document_size;
}

@Override
public short readShort() {
return (short) readUShort();
private void checkAvaliable(int requestedSize) {
if (_closed) {
throw new IllegalStateException("cannot perform requested operation on a closed stream");
}
if (requestedSize > _document_size - _current_offset) {
throw new RuntimeException("Buffer underrun - requested " + requestedSize
+ " bytes but " + (_document_size - _current_offset) + " was available");
}
}

@Override
@@ -140,36 +281,86 @@ public class DocumentInputStream extends InputStream implements LittleEndianInpu

@Override
public void readFully(byte[] buf, int off, int len) {
delegate.readFully(buf, off, len);
}
if (len < 0) {
throw new RuntimeException("Can't read negative number of bytes");
}

@Override
public long readLong() {
return delegate.readLong();
checkAvaliable(len);

int read = 0;
while(read < len) {
if(_buffer == null || _buffer.remaining() == 0) {
_current_block_count++;
_buffer = _data.next();
}
int limit = Math.min(len-read, _buffer.remaining());
_buffer.get(buf, off+read, limit);
_current_offset += limit;
read += limit;
}
}

@Override
public int readInt() {
return delegate.readInt();
}
public void readPlain(byte[] buf, int off, int len) {
readFully(buf, off, len);
}


@Override
public int readUShort() {
return delegate.readUShort();
public byte readByte() {
return (byte) readUByte();
}

@Override
public double readDouble() {
return Double.longBitsToDouble(readLong());
}

@Override
public long readLong() {
checkAvaliable(LONG_SIZE);
byte[] data = new byte[LONG_SIZE];
readFully(data, 0, LONG_SIZE);
return LittleEndian.getLong(data, 0);
}

@Override
public int readUByte() {
return delegate.readUByte();
@Override
public short readShort() {
checkAvaliable(SHORT_SIZE);
byte[] data = new byte[SHORT_SIZE];
readFully(data, 0, SHORT_SIZE);
return LittleEndian.getShort(data);
}

@Override
public int readInt() {
checkAvaliable(INT_SIZE);
byte[] data = new byte[INT_SIZE];
readFully(data, 0, INT_SIZE);
return LittleEndian.getInt(data);
}
public long readUInt() {
int i = readInt();
return i & 0xFFFFFFFFL;
}

@Override
public void readPlain(byte[] buf, int off, int len) {
readFully(buf, off, len);
public int readUShort() {
checkAvaliable(SHORT_SIZE);
byte[] data = new byte[SHORT_SIZE];
readFully(data, 0, SHORT_SIZE);
return LittleEndian.getUShort(data);
}

@Override
public int readUByte() {
checkAvaliable(1);
byte[] data = new byte[1];
readFully(data, 0, 1);
if (data[0] >= 0)
return data[0];
return data[0] + 256;
}
}
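
For illustration, a sketch that uses the little-endian readers above to decode the first BIFF record header of a workbook stream; "Workbook" is the usual HSSF stream name and the file path is a placeholder:

    import java.io.File;
    import java.io.IOException;
    import org.apache.poi.poifs.filesystem.DocumentInputStream;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class ReadFirstBiffRecord {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("sample.xls"));
                 DocumentInputStream dis = fs.createDocumentInputStream("Workbook")) {
                int sid = dis.readUShort();   // record id, little-endian
                int len = dis.readUShort();   // record payload length
                byte[] payload = new byte[len];
                dis.readFully(payload);
                System.out.println("sid=0x" + Integer.toHexString(sid) + " len=" + len);
            }
        }
    }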

+ 2
- 2
src/java/org/apache/poi/poifs/filesystem/DocumentNode.java View File

@@ -35,7 +35,7 @@ public class DocumentNode
{

// underlying POIFSDocument instance
private NPOIFSDocument _document;
private POIFSDocument _document;

/**
* create a DocumentNode. This method is not public by design; it
@@ -56,7 +56,7 @@ public class DocumentNode
*
* @return the internal POIFSDocument
*/
NPOIFSDocument getDocument()
POIFSDocument getDocument()
{
return _document;
}

+ 125
- 115
src/java/org/apache/poi/poifs/filesystem/DocumentOutputStream.java View File

@@ -17,147 +17,157 @@

package org.apache.poi.poifs.filesystem;

import java.io.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import java.util.*;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.property.DocumentProperty;

/**
* This class provides a wrapper over an OutputStream so that Document
* writers can't accidentally go over their size limits
*
* @author Marc Johnson (mjohnson at apache dot org)
* This class provides methods to write a DocumentEntry managed by a
* {@link POIFSFileSystem} instance.
*/

public final class DocumentOutputStream extends OutputStream {
private final OutputStream _stream;
private final int _limit;
private int _written;
/** the Document's size, i.e. the size of the big block data - mini block data is cached and not counted */
private int _document_size = 0;

/** have we been closed? */
private boolean _closed = false;

/** the actual Document */
private POIFSDocument _document;
/** and its Property */
private DocumentProperty _property;
/** our buffer, when null we're into normal blocks */
private ByteArrayOutputStream _buffer =
new ByteArrayOutputStream(POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE);
/** our main block stream, when we're into normal blocks */
private POIFSStream _stream;
private OutputStream _stream_output;

/** a write limit or -1 if unlimited */
private final long _limit;


/**
* Create an OutputStream from the specified DocumentEntry.
* The specified entry will be emptied.
*
* @param document the DocumentEntry to be written
*/
public DocumentOutputStream(DocumentEntry document) throws IOException {
this(document, -1);
}

/**
* Create an OutputStream to create the specified new Entry
*
* @param parent Where to create the Entry
* @param name Name of the new entry
*/
public DocumentOutputStream(DirectoryEntry parent, String name) throws IOException {
this(createDocument(parent, name), -1);
}

/**
* Create a DocumentOutputStream
*
* @param stream the OutputStream to which the data is actually
* read
* @param document the DocumentEntry to which the data is actually written
* @param limit the maximum number of bytes that can be written
*/
DocumentOutputStream(OutputStream stream, int limit) {
_stream = stream;
DocumentOutputStream(DocumentEntry document, long limit) throws IOException {
this(getDocument(document), limit);
}

DocumentOutputStream(POIFSDocument document, long limit) throws IOException {
_document = document;
_document.free();

_property = document.getDocumentProperty();

_limit = limit;
_written = 0;
}

/**
* Writes the specified byte to this output stream. The general
* contract for write is that one byte is written to the output
* stream. The byte to be written is the eight low-order bits of
* the argument b. The 24 high-order bits of b are ignored.
*
* @param b the byte.
* @exception IOException if an I/O error occurs. In particular,
* an IOException may be thrown if the
* output stream has been closed, or if the
* writer tries to write too much data.
*/
public void write(int b)
throws IOException
{
limitCheck(1);
_stream.write(b);
private static POIFSDocument getDocument(DocumentEntry document) throws IOException {
if (!(document instanceof DocumentNode)) {
throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
}
return new POIFSDocument((DocumentNode)document);
}

/**
* Writes b.length bytes from the specified byte array
* to this output stream.
*
* @param b the data.
* @exception IOException if an I/O error occurs.
*/
public void write(byte b[])
throws IOException
{
write(b, 0, b.length);
private static DocumentEntry createDocument(DirectoryEntry parent, String name) throws IOException {
if (!(parent instanceof DirectoryNode)) {
throw new IOException("Cannot open internal directory storage, " + parent + " not a Directory Node");
}

// Have an empty one created for now
return parent.createDocument(name, new ByteArrayInputStream(new byte[0]));
}

/**
* Writes len bytes from the specified byte array starting at
* offset off to this output stream. The general contract for
* write(b, off, len) is that some of the bytes in the array b are
* written to the output stream in order; element b[off] is the
* first byte written and b[off+len-1] is the last byte written by
* this operation.<p>
* If b is null, a NullPointerException is thrown.<p>
* If off is negative, or len is negative, or off+len is greater
* than the length of the array b, then an
* IndexOutOfBoundsException is thrown.
*
* @param b the data.
* @param off the start offset in the data.
* @param len the number of bytes to write.
* @exception IOException if an I/O error occurs. In particular,
* an <code>IOException</code> is thrown if the
* output stream is closed or if the writer
* tries to write too many bytes.
*/
public void write(byte b[], int off, int len)
throws IOException
{
limitCheck(len);
_stream.write(b, off, len);
private void checkBufferSize() throws IOException {
// Have we gone over the mini stream limit yet?
if (_buffer.size() > POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
// Will need to be in the main stream
byte[] data = _buffer.toByteArray();
_buffer = null;
write(data, 0, data.length);
} else {
// So far, mini stream will work, keep going
}
}

/**
* Flushes this output stream and forces any buffered output bytes
* to be written out.
*
* @exception IOException if an I/O error occurs.
*/
public void flush()
throws IOException
{
_stream.flush();
public void write(int b) throws IOException {
write(new byte[] { (byte)b }, 0, 1);
}

/**
* Closes this output stream and releases any system resources
* associated with this stream. The general contract of close is
* that it closes the output stream. A closed stream cannot
* perform output operations and cannot be reopened.
*
* @exception IOException if an I/O error occurs.
*/
public void close() {
@Override
public void write(byte[] b, int off, int len) throws IOException {
if (_closed) {
throw new IOException("cannot perform requested operation on a closed stream");
}
if (_limit > -1 && (size() + len) > _limit) {
throw new IOException("tried to write too much data");
}

// ignore this call
if (_buffer != null) {
_buffer.write(b, off, len);
checkBufferSize();
} else {
if (_stream == null) {
_stream = new POIFSStream(_document.getFileSystem());
_stream_output = _stream.getOutputStream();
}
_stream_output.write(b, off, len);
_document_size += len;
}
}

/**
* write the rest of the document's data (fill in at the end)
*
* @param totalLimit the actual number of bytes the corresponding
* document must fill
* @param fill the byte to fill remaining space with
*
* @exception IOException on I/O error
*/
void writeFiller(int totalLimit, byte fill)
throws IOException
{
if (totalLimit > _written)
{
byte[] filler = new byte[ totalLimit - _written ];

Arrays.fill(filler, fill);
_stream.write(filler);
public void close() throws IOException {
// Do we have a pending buffer for the mini stream?
if (_buffer != null) {
// It's not much data, so ask POIFSDocument to do it for us
_document.replaceContents(new ByteArrayInputStream(_buffer.toByteArray()));
}
else {
// We've been writing to the stream as we've gone along
// Update the details on the property now
_stream_output.close();
_property.updateSize(_document_size);
_property.setStartBlock(_stream.getStartBlock());
}
// No more!
_closed = true;
}

private void limitCheck(int toBeWritten)
throws IOException
{
if ((_written + toBeWritten) > _limit)
{
throw new IOException("tried to write too much data");
}
_written += toBeWritten;
/**
* @return the amount of written bytes
*/
public long size() {
return _document_size + (_buffer == null ? 0 : _buffer.size());
}
}
}
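
A sketch of writing through the public DocumentEntry/DirectoryEntry constructors above; as the code shows, small documents are buffered for the mini stream and spill into big blocks once they pass BIG_BLOCK_MINIMUM_DOCUMENT_SIZE (entry and file names are placeholders):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import org.apache.poi.poifs.filesystem.DocumentOutputStream;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class WriteNewStream {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem fs = new POIFSFileSystem()) {
                // create "MyStream" under the root and stream bytes into it
                try (DocumentOutputStream out = new DocumentOutputStream(fs.getRoot(), "MyStream")) {
                    out.write(new byte[] {0x01, 0x02, 0x03});
                }   // close() picks mini vs. main stream and updates the property
                try (OutputStream file = new FileOutputStream(new File("container.ole2"))) {
                    fs.writeFilesystem(file);
                }
            }
        }
    }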

+ 2
- 2
src/java/org/apache/poi/poifs/filesystem/EntryUtils.java View File

@@ -81,7 +81,7 @@ public final class EntryUtils {
* @param target
* is the target POIFS to copy to
*/
public static void copyNodes( NPOIFSFileSystem source, NPOIFSFileSystem target )
public static void copyNodes(POIFSFileSystem source, POIFSFileSystem target )
throws IOException {
copyNodes( source.getRoot(), target.getRoot() );
}
@@ -96,7 +96,7 @@ public final class EntryUtils {
* @param target is the target POIFS to copy to
* @param excepts is a list of Entry Names to be excluded from the copy
*/
public static void copyNodes( NPOIFSFileSystem source, NPOIFSFileSystem target, List<String> excepts )
public static void copyNodes(POIFSFileSystem source, POIFSFileSystem target, List<String> excepts )
throws IOException {
copyNodes(
new FilteringDirectoryNode(source.getRoot(), excepts),

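EntryUtils.copyNodes now takes two POIFSFileSystem instances; a minimal cloning sketch (file names are placeholders):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import org.apache.poi.poifs.filesystem.EntryUtils;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class CloneContainer {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem source = new POIFSFileSystem(new File("in.ole2"));
                 POIFSFileSystem target = new POIFSFileSystem();
                 OutputStream out = new FileOutputStream("out.ole2")) {
                EntryUtils.copyNodes(source, target);   // copies directories and documents recursively
                target.writeFilesystem(out);
            }
        }
    }
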
+ 0
- 330
src/java/org/apache/poi/poifs/filesystem/NDocumentInputStream.java View File

@@ -1,330 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.filesystem;

import static org.apache.poi.util.LittleEndianConsts.INT_SIZE;
import static org.apache.poi.util.LittleEndianConsts.LONG_SIZE;
import static org.apache.poi.util.LittleEndianConsts.SHORT_SIZE;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;

import org.apache.poi.poifs.property.DocumentProperty;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;

/**
* This class provides methods to read a DocumentEntry managed by a
* {@link NPOIFSFileSystem} instance.
*/
public final class NDocumentInputStream extends DocumentInputStream {
/** current offset into the Document */
private int _current_offset;
/** current block count */
private int _current_block_count;

/** current marked offset into the Document (used by mark and reset) */
private int _marked_offset;
/** and the block count for it */
private int _marked_offset_count;

/** the Document's size */
private final int _document_size;

/** have we been closed? */
private boolean _closed;

/** the actual Document */
private final NPOIFSDocument _document;

private Iterator<ByteBuffer> _data;
private ByteBuffer _buffer;

/**
* Create an InputStream from the specified DocumentEntry
*
* @param document the DocumentEntry to be read
*
* @exception IOException if the DocumentEntry cannot be opened (like, maybe it has
* been deleted?)
*/
public NDocumentInputStream(DocumentEntry document) throws IOException {
if (!(document instanceof DocumentNode)) {
throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
}
_current_offset = 0;
_current_block_count = 0;
_marked_offset = 0;
_marked_offset_count = 0;
_document_size = document.getSize();
_closed = false;

// can't be asserted ... see bug 61300
// assert (_document_size >= 0) : "Document size can't be < 0";

DocumentNode doc = (DocumentNode)document;
DocumentProperty property = (DocumentProperty)doc.getProperty();
_document = new NPOIFSDocument(
property,
((DirectoryNode)doc.getParent()).getNFileSystem()
);
_data = _document.getBlockIterator();
}

/**
* Create an InputStream from the specified Document
*
* @param document the Document to be read
*/
public NDocumentInputStream(NPOIFSDocument document) {
_current_offset = 0;
_current_block_count = 0;
_marked_offset = 0;
_marked_offset_count = 0;
_document_size = document.getSize();
_closed = false;
_document = document;
_data = _document.getBlockIterator();
}

@Override
public int available() {
return remainingBytes();
}

/**
* Helper methods for forbidden api calls
*
* @return the bytes remaining until the end of the stream
*/
private int remainingBytes() {
if (_closed) {
throw new IllegalStateException("cannot perform requested operation on a closed stream");
}
return _document_size - _current_offset;
}

@Override
public void close() {
_closed = true;
}

@Override
public void mark(int ignoredReadlimit) {
_marked_offset = _current_offset;
_marked_offset_count = Math.max(0, _current_block_count - 1);
}

@Override
public int read() throws IOException {
dieIfClosed();
if (atEOD()) {
return EOF;
}
byte[] b = new byte[1];
int result = read(b, 0, 1);
if(result >= 0) {
if(b[0] < 0) {
return b[0]+256;
}
return b[0];
}
return result;
}

@Override
public int read(byte[] b, int off, int len) throws IOException {
dieIfClosed();
if (b == null) {
throw new IllegalArgumentException("buffer must not be null");
}
if (off < 0 || len < 0 || b.length < off + len) {
throw new IndexOutOfBoundsException("can't read past buffer boundaries");
}
if (len == 0) {
return 0;
}
if (atEOD()) {
return EOF;
}
int limit = Math.min(remainingBytes(), len);
readFully(b, off, limit);
return limit;
}

/**
* Repositions this stream to the position at the time the mark() method was
* last called on this input stream. If mark() has not been called this
* method repositions the stream to its beginning.
*/
@Override
public void reset() {
// Special case for reset to the start
if(_marked_offset == 0 && _marked_offset_count == 0) {
_current_block_count = _marked_offset_count;
_current_offset = _marked_offset;
_data = _document.getBlockIterator();
_buffer = null;
return;
}

// Start again, then wind on to the required block
_data = _document.getBlockIterator();
_current_offset = 0;
for(int i=0; i<_marked_offset_count; i++) {
_buffer = _data.next();
_current_offset += _buffer.remaining();
}
_current_block_count = _marked_offset_count;
// Do we need to position within it?
if(_current_offset != _marked_offset) {
// Grab the right block
_buffer = _data.next();
_current_block_count++;
// Skip to the right place in it
// (It should be positioned already at the start of the block,
// we need to move further inside the block)
int skipBy = _marked_offset - _current_offset;
_buffer.position(_buffer.position() + skipBy);
}

// All done
_current_offset = _marked_offset;
}

@Override
public long skip(long n) throws IOException {
dieIfClosed();
if (n < 0) {
return 0;
}
long new_offset = _current_offset + n;

if (new_offset < _current_offset) {
// wrap around in converting a VERY large long to an int
new_offset = _document_size;
} else if (new_offset > _document_size) {
new_offset = _document_size;
}
long rval = new_offset - _current_offset;
// TODO Do this better
byte[] skip = IOUtils.safelyAllocate(rval, Integer.MAX_VALUE);
readFully(skip);
return rval;
}

private void dieIfClosed() throws IOException {
if (_closed) {
throw new IOException("cannot perform requested operation on a closed stream");
}
}

private boolean atEOD() {
return _current_offset == _document_size;
}

private void checkAvaliable(int requestedSize) {
if (_closed) {
throw new IllegalStateException("cannot perform requested operation on a closed stream");
}
if (requestedSize > _document_size - _current_offset) {
throw new RuntimeException("Buffer underrun - requested " + requestedSize
+ " bytes but " + (_document_size - _current_offset) + " was available");
}
}

@Override
public void readFully(byte[] buf, int off, int len) {
if (len < 0) {
throw new RuntimeException("Can't read negative number of bytes");
}

checkAvaliable(len);

int read = 0;
while(read < len) {
if(_buffer == null || _buffer.remaining() == 0) {
_current_block_count++;
_buffer = _data.next();
}
int limit = Math.min(len-read, _buffer.remaining());
_buffer.get(buf, off+read, limit);
_current_offset += limit;
read += limit;
}
}

@Override
public byte readByte() {
return (byte) readUByte();
}

@Override
public double readDouble() {
return Double.longBitsToDouble(readLong());
}

@Override
public long readLong() {
checkAvaliable(LONG_SIZE);
byte[] data = new byte[LONG_SIZE];
readFully(data, 0, LONG_SIZE);
return LittleEndian.getLong(data, 0);
}

@Override
public short readShort() {
checkAvaliable(SHORT_SIZE);
byte[] data = new byte[SHORT_SIZE];
readFully(data, 0, SHORT_SIZE);
return LittleEndian.getShort(data);
}

@Override
public int readInt() {
checkAvaliable(INT_SIZE);
byte[] data = new byte[INT_SIZE];
readFully(data, 0, INT_SIZE);
return LittleEndian.getInt(data);
}

@Override
public int readUShort() {
checkAvaliable(SHORT_SIZE);
byte[] data = new byte[SHORT_SIZE];
readFully(data, 0, SHORT_SIZE);
return LittleEndian.getUShort(data);
}

@Override
public int readUByte() {
checkAvaliable(1);
byte[] data = new byte[1];
readFully(data, 0, 1);
if (data[0] >= 0)
return data[0];
return data[0] + 256;
}
}
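
For callers that referenced the removed class directly, the migration is a type rename only; a sketch, assuming "entry" is any DocumentEntry obtained from a DirectoryNode:

    // before (class removed here):
    //   DocumentInputStream dis = new NDocumentInputStream(entry);
    // afterwards:
    DocumentInputStream dis = new DocumentInputStream(entry);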

+ 0
- 163
src/java/org/apache/poi/poifs/filesystem/NDocumentOutputStream.java View File

@@ -1,163 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.filesystem;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.property.DocumentProperty;

/**
* This class provides methods to write a DocumentEntry managed by a
* {@link NPOIFSFileSystem} instance.
*/
public final class NDocumentOutputStream extends OutputStream {
/** the Document's size */
private int _document_size;

/** have we been closed? */
private boolean _closed;

/** the actual Document */
private NPOIFSDocument _document;
/** and its Property */
private DocumentProperty _property;
/** our buffer, when null we're into normal blocks */
private ByteArrayOutputStream _buffer =
new ByteArrayOutputStream(POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE);
/** our main block stream, when we're into normal blocks */
private NPOIFSStream _stream;
private OutputStream _stream_output;
/**
* Create an OutputStream from the specified DocumentEntry.
* The specified entry will be emptied.
*
* @param document the DocumentEntry to be written
*/
public NDocumentOutputStream(DocumentEntry document) throws IOException {
if (!(document instanceof DocumentNode)) {
throw new IOException("Cannot open internal document storage, " + document + " not a Document Node");
}
_document_size = 0;
_closed = false;
_property = (DocumentProperty)((DocumentNode)document).getProperty();
_document = new NPOIFSDocument((DocumentNode)document);
_document.free();
}
/**
* Create an OutputStream to create the specified new Entry
*
* @param parent Where to create the Entry
* @param name Name of the new entry
*/
public NDocumentOutputStream(DirectoryEntry parent, String name) throws IOException {
if (!(parent instanceof DirectoryNode)) {
throw new IOException("Cannot open internal directory storage, " + parent + " not a Directory Node");
}
_document_size = 0;
_closed = false;

// Have an empty one created for now
DocumentEntry doc = parent.createDocument(name, new ByteArrayInputStream(new byte[0]));
_property = (DocumentProperty)((DocumentNode)doc).getProperty();
_document = new NPOIFSDocument((DocumentNode)doc);
}
private void dieIfClosed() throws IOException {
if (_closed) {
throw new IOException("cannot perform requested operation on a closed stream");
}
}
private void checkBufferSize() throws IOException {
// Have we gone over the mini stream limit yet?
if (_buffer.size() > POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
// Will need to be in the main stream
byte[] data = _buffer.toByteArray();
_buffer = null;
write(data, 0, data.length);
} else {
// So far, mini stream will work, keep going
}
}

public void write(int b) throws IOException {
dieIfClosed();
if (_buffer != null) {
_buffer.write(b);
checkBufferSize();
} else {
write(new byte[] { (byte)b });
}
}

public void write(byte[] b) throws IOException {
dieIfClosed();
if (_buffer != null) {
_buffer.write(b);
checkBufferSize();
} else {
write(b, 0, b.length);
}
}

public void write(byte[] b, int off, int len) throws IOException {
dieIfClosed();
if (_buffer != null) {
_buffer.write(b, off, len);
checkBufferSize();
} else {
if (_stream == null) {
_stream = new NPOIFSStream(_document.getFileSystem());
_stream_output = _stream.getOutputStream();
}
_stream_output.write(b, off, len);
_document_size += len;
}
}

public void close() throws IOException {
// Do we have a pending buffer for the mini stream?
if (_buffer != null) {
// It's not much data, so ask NPOIFSDocument to do it for us
_document.replaceContents(new ByteArrayInputStream(_buffer.toByteArray()));
}
else {
// We've been writing to the stream as we've gone along
// Update the details on the property now
_stream_output.close();
_property.updateSize(_document_size);
_property.setStartBlock(_stream.getStartBlock());
}
// No more!
_closed = true;
}
}
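
The output side migrates the same way; a sketch, assuming "entry" is an existing DocumentEntry:

    // before (class removed here):
    //   OutputStream os = new NDocumentOutputStream(entry);
    // afterwards:
    OutputStream os = new DocumentOutputStream(entry);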

+ 0
- 947
src/java/org/apache/poi/poifs/filesystem/NPOIFSFileSystem.java View File

@@ -1,947 +0,0 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */


package org.apache.poi.poifs.filesystem;

import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.apache.poi.EmptyFileException;
import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.dev.POIFSViewable;
import org.apache.poi.poifs.nio.ByteArrayBackedDataSource;
import org.apache.poi.poifs.nio.DataSource;
import org.apache.poi.poifs.nio.FileBackedDataSource;
import org.apache.poi.poifs.property.DirectoryProperty;
import org.apache.poi.poifs.property.DocumentProperty;
import org.apache.poi.poifs.property.NPropertyTable;
import org.apache.poi.poifs.storage.BATBlock;
import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
import org.apache.poi.poifs.storage.BlockAllocationTableReader;
import org.apache.poi.poifs.storage.BlockAllocationTableWriter;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.poifs.storage.HeaderBlockWriter;
import org.apache.poi.util.CloseIgnoringInputStream;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Internal;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

/**
* <p>This is the main class of the POIFS system; it manages the entire
* life cycle of the filesystem.</p>
* <p>This is the new NIO version, which uses less memory</p>
*/

public class NPOIFSFileSystem extends BlockStore
implements POIFSViewable, Closeable
{
//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;

private static final POILogger LOG = POILogFactory.getLogger(NPOIFSFileSystem.class);

/**
* Convenience method for clients that want to avoid the auto-close behaviour of the constructor.
*/
public static InputStream createNonClosingInputStream(InputStream is) {
return new CloseIgnoringInputStream(is);
}
private NPOIFSMiniStore _mini_store;
private NPropertyTable _property_table;
private List<BATBlock> _xbat_blocks;
private List<BATBlock> _bat_blocks;
private HeaderBlock _header;
private DirectoryNode _root;
private DataSource _data;
/**
* What big block size the file uses. Most files
* use 512 bytes, but a few use 4096
*/
private POIFSBigBlockSize bigBlockSize =
POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;

private NPOIFSFileSystem(boolean newFS)
{
_header = new HeaderBlock(bigBlockSize);
_property_table = new NPropertyTable(_header);
_mini_store = new NPOIFSMiniStore(this, _property_table.getRoot(), new ArrayList<>(), _header);
_xbat_blocks = new ArrayList<>();
_bat_blocks = new ArrayList<>();
_root = null;
if(newFS) {
// Data needs to initially hold just the header block,
// a single bat block, and an empty properties section
_data = new ByteArrayBackedDataSource(IOUtils.safelyAllocate(
bigBlockSize.getBigBlockSize()*3, MAX_RECORD_LENGTH));
}
}
/**
* Constructor, intended for writing
*/
public NPOIFSFileSystem()
{
this(true);
// Reserve block 0 for the start of the Properties Table
// Create a single empty BAT, and put that at offset 1
_header.setBATCount(1);
_header.setBATArray(new int[] { 1 });
BATBlock bb = BATBlock.createEmptyBATBlock(bigBlockSize, false);
bb.setOurBlockIndex(1);
_bat_blocks.add(bb);

setNextBlock(0, POIFSConstants.END_OF_CHAIN);
setNextBlock(1, POIFSConstants.FAT_SECTOR_BLOCK);

_property_table.setStartBlock(0);
}

/**
* <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
* creating from an <tt>InputStream</tt>. The File will be opened read-only</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying file closed, as the file is
* kept open during normal operation to read the data out.</p>
*
* @param file the File from which to read the data
*
* @exception IOException on errors reading, or on invalid data
*/
public NPOIFSFileSystem(File file)
throws IOException
{
this(file, true);
}
/**
* <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
* creating from an <tt>InputStream</tt>.</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying file closed, as the file is
* kept open during normal operation to read the data out.</p>
*
* @param file the File from which to read or read/write the data
* @param readOnly whether the POIFileSystem will only be used in read-only mode
*
* @exception IOException on errors reading, or on invalid data
*/
public NPOIFSFileSystem(File file, boolean readOnly)
throws IOException
{
this(null, file, readOnly, true);
}
/**
* <p>Creates a POIFSFileSystem from an open <tt>FileChannel</tt>. This uses
* less memory than creating from an <tt>InputStream</tt>. The stream will
* be used in read-only mode.</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying Channel closed, as the channel is
* kept open during normal operation to read the data out.</p>
*
* @param channel the FileChannel from which to read the data
*
* @exception IOException on errors reading, or on invalid data
*/
public NPOIFSFileSystem(FileChannel channel)
throws IOException
{
this(channel, true);
}
/**
* <p>Creates a POIFSFileSystem from an open <tt>FileChannel</tt>. This uses
* less memory than creating from an <tt>InputStream</tt>.</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying Channel closed, as the channel is
* kept open during normal operation to read the data out.</p>
*
* @param channel the FileChannel from which to read or read/write the data
* @param readOnly whether the POIFileSystem will only be used in read-only mode
*
* @exception IOException on errors reading, or on invalid data
*/
public NPOIFSFileSystem(FileChannel channel, boolean readOnly)
throws IOException
{
this(channel, null, readOnly, false);
}
private NPOIFSFileSystem(FileChannel channel, File srcFile, boolean readOnly, boolean closeChannelOnError)
throws IOException
{
this(false);

try {
// Initialize the datasource
if (srcFile != null) {
if (srcFile.length() == 0)
throw new EmptyFileException();
FileBackedDataSource d = new FileBackedDataSource(srcFile, readOnly);
channel = d.getChannel();
_data = d;
} else {
_data = new FileBackedDataSource(channel, readOnly);
}
// Get the header
ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
IOUtils.readFully(channel, headerBuffer);
// Have the header processed
_header = new HeaderBlock(headerBuffer);
// Now process the various entries
readCoreContents();
} catch(IOException | RuntimeException e) {
// Comes from Iterators etc.
// TODO Decide if we can handle these better whilst
// still sticking to the iterator contract
if (closeChannelOnError && channel != null) {
channel.close();
channel = null;
}
throw e;
}
}
/**
* Create a POIFSFileSystem from an <tt>InputStream</tt>. Normally the stream is read until
* EOF. The stream is always closed.<p>
*
* Some streams are usable after reaching EOF (typically those that return <code>true</code>
* for <tt>markSupported()</tt>). In the unlikely case that the caller has such a stream
* <i>and</i> needs to use it after this constructor completes, a work around is to wrap the
* stream in order to trap the <tt>close()</tt> call. A convenience method (
* <tt>createNonClosingInputStream()</tt>) has been provided for this purpose:
* <pre>
* InputStream wrappedStream = POIFSFileSystem.createNonClosingInputStream(is);
* HSSFWorkbook wb = new HSSFWorkbook(wrappedStream);
* is.reset();
* doSomethingElse(is);
* </pre>
* Note also the special case of <tt>ByteArrayInputStream</tt> for which the <tt>close()</tt>
* method does nothing.
* <pre>
* ByteArrayInputStream bais = ...
* HSSFWorkbook wb = new HSSFWorkbook(bais); // calls bais.close() !
* bais.reset(); // no problem
* doSomethingElse(bais);
* </pre>
*
* @param stream the InputStream from which to read the data
*
* @exception IOException on errors reading, or on invalid data
*/

public NPOIFSFileSystem(InputStream stream)
throws IOException
{
this(false);
ReadableByteChannel channel = null;
boolean success = false;
try {
// Turn our InputStream into something NIO based
channel = Channels.newChannel(stream);
// Get the header
ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
IOUtils.readFully(channel, headerBuffer);
// Have the header processed
_header = new HeaderBlock(headerBuffer);
// Sanity check the block count
BlockAllocationTableReader.sanityCheckBlockCount(_header.getBATCount());
// We need to buffer the whole file into memory when
// working with an InputStream.
// The max possible size is when each BAT block entry is used
long maxSize = BATBlock.calculateMaximumSize(_header);
if (maxSize > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unable read a >2gb file via an InputStream");
}
ByteBuffer data = ByteBuffer.allocate((int)maxSize);
// Copy in the header
headerBuffer.position(0);
data.put(headerBuffer);
data.position(headerBuffer.capacity());
// Now read the rest of the stream
IOUtils.readFully(channel, data);
success = true;
// Turn it into a DataSource
_data = new ByteArrayBackedDataSource(data.array(), data.position());
} finally {
// As per the constructor contract, always close the stream
if(channel != null)
channel.close();
closeInputStream(stream, success);
}
// Now process the various entries
readCoreContents();
}
/**
* @param stream the stream to be closed
* @param success <code>false</code> if an exception is currently being thrown in the calling method
*/
private void closeInputStream(InputStream stream, boolean success) {
try {
stream.close();
} catch (IOException e) {
if(success) {
throw new RuntimeException(e);
}
// else not success? Try block did not complete normally
// just print stack trace and leave original ex to be thrown
LOG.log(POILogger.ERROR, "can't close input stream", e);
}
}

/**
* Read and process the PropertiesTable and the
* FAT / XFAT blocks, so that we're ready to
* work with the file
*/
private void readCoreContents() throws IOException {
// Grab the block size
bigBlockSize = _header.getBigBlockSize();
// Each block should only ever be used by one of the
// FAT, XFAT or Property Table. Ensure it does
ChainLoopDetector loopDetector = getChainLoopDetector();
// Read the FAT blocks
for(int fatAt : _header.getBATArray()) {
readBAT(fatAt, loopDetector);
}
// Work out how many FAT blocks remain in the XFATs
int remainingFATs = _header.getBATCount() - _header.getBATArray().length;
// Now read the XFAT blocks, and the FATs within them
BATBlock xfat;
int nextAt = _header.getXBATIndex();
for(int i=0; i<_header.getXBATCount(); i++) {
loopDetector.claim(nextAt);
ByteBuffer fatData = getBlockAt(nextAt);
xfat = BATBlock.createBATBlock(bigBlockSize, fatData);
xfat.setOurBlockIndex(nextAt);
nextAt = xfat.getValueAt(bigBlockSize.getXBATEntriesPerBlock());
_xbat_blocks.add(xfat);
// Process all the (used) FATs from this XFAT
int xbatFATs = Math.min(remainingFATs, bigBlockSize.getXBATEntriesPerBlock());
for(int j=0; j<xbatFATs; j++) {
int fatAt = xfat.getValueAt(j);
if(fatAt == POIFSConstants.UNUSED_BLOCK || fatAt == POIFSConstants.END_OF_CHAIN) break;
readBAT(fatAt, loopDetector);
}
remainingFATs -= xbatFATs;
}
// We're now able to load streams
// Use this to read in the properties
_property_table = new NPropertyTable(_header, this);
// Finally read the Small Stream FAT (SBAT) blocks
BATBlock sfat;
List<BATBlock> sbats = new ArrayList<>();
_mini_store = new NPOIFSMiniStore(this, _property_table.getRoot(), sbats, _header);
nextAt = _header.getSBATStart();
for(int i=0; i<_header.getSBATCount() && nextAt != POIFSConstants.END_OF_CHAIN; i++) {
loopDetector.claim(nextAt);
ByteBuffer fatData = getBlockAt(nextAt);
sfat = BATBlock.createBATBlock(bigBlockSize, fatData);
sfat.setOurBlockIndex(nextAt);
sbats.add(sfat);
nextAt = getNextBlock(nextAt);
}
}
private void readBAT(int batAt, ChainLoopDetector loopDetector) throws IOException {
loopDetector.claim(batAt);
ByteBuffer fatData = getBlockAt(batAt);
BATBlock bat = BATBlock.createBATBlock(bigBlockSize, fatData);
bat.setOurBlockIndex(batAt);
_bat_blocks.add(bat);
}
private BATBlock createBAT(int offset, boolean isBAT) throws IOException {
// Create a new BATBlock
BATBlock newBAT = BATBlock.createEmptyBATBlock(bigBlockSize, !isBAT);
newBAT.setOurBlockIndex(offset);
// Ensure there's a spot in the file for it
ByteBuffer buffer = ByteBuffer.allocate(bigBlockSize.getBigBlockSize());
int writeTo = (1+offset) * bigBlockSize.getBigBlockSize(); // Header isn't in BATs
_data.write(buffer, writeTo);
// All done
return newBAT;
}
/**
* Load the block at the given offset.
*/
@Override
protected ByteBuffer getBlockAt(final int offset) throws IOException {
// The header block doesn't count, so add one
long blockWanted = offset + 1L;
long startAt = blockWanted * bigBlockSize.getBigBlockSize();
try {
return _data.read(bigBlockSize.getBigBlockSize(), startAt);
} catch (IndexOutOfBoundsException e) {
IndexOutOfBoundsException wrapped = new IndexOutOfBoundsException("Block " + offset + " not found");
wrapped.initCause(e);
throw wrapped;
}
}
/**
* Load the block at the given offset,
* extending the file if needed
*/
@Override
protected ByteBuffer createBlockIfNeeded(final int offset) throws IOException {
try {
return getBlockAt(offset);
} catch(IndexOutOfBoundsException e) {
// The header block doesn't count, so add one
long startAt = (offset+1L) * bigBlockSize.getBigBlockSize();
// Allocate and write
ByteBuffer buffer = ByteBuffer.allocate(getBigBlockSize());
_data.write(buffer, startAt);
// Retrieve the properly backed block
return getBlockAt(offset);
}
}
/**
* Returns the BATBlock that handles the specified offset,
* and the relative index within it
*/
@Override
protected BATBlockAndIndex getBATBlockAndIndex(final int offset) {
return BATBlock.getBATBlockAndIndex(
offset, _header, _bat_blocks
);
}
/**
* Works out what block follows the specified one.
*/
@Override
protected int getNextBlock(final int offset) {
BATBlockAndIndex bai = getBATBlockAndIndex(offset);
return bai.getBlock().getValueAt( bai.getIndex() );
}
/**
* Changes the record of what block follows the specified one.
*/
@Override
protected void setNextBlock(final int offset, final int nextBlock) {
BATBlockAndIndex bai = getBATBlockAndIndex(offset);
bai.getBlock().setValueAt(
bai.getIndex(), nextBlock
);
}
/**
* Finds a free block, and returns its offset.
* This method will extend the file if needed, and if doing
* so, allocate new FAT blocks to address the extra space.
*/
@Override
protected int getFreeBlock() throws IOException {
int numSectors = bigBlockSize.getBATEntriesPerBlock();

// First up, do we have any spare ones?
int offset = 0;
for (BATBlock bat : _bat_blocks) {
if(bat.hasFreeSectors()) {
// Claim one of them and return it
for(int j=0; j<numSectors; j++) {
int batValue = bat.getValueAt(j);
if(batValue == POIFSConstants.UNUSED_BLOCK) {
// Bingo
return offset + j;
}
}
}
// Move onto the next BAT
offset += numSectors;
}
// If we get here, then there aren't any free sectors
// in any of the BATs, so we need another BAT
BATBlock bat = createBAT(offset, true);
bat.setValueAt(0, POIFSConstants.FAT_SECTOR_BLOCK);
_bat_blocks.add(bat);
// Now store a reference to the BAT in the required place
if(_header.getBATCount() >= 109) {
// Needs to come from an XBAT
BATBlock xbat = null;
for(BATBlock x : _xbat_blocks) {
if(x.hasFreeSectors()) {
xbat = x;
break;
}
}
if(xbat == null) {
// Oh joy, we need a new XBAT too...
xbat = createBAT(offset+1, false);
// Allocate our new BAT as the first block in the XBAT
xbat.setValueAt(0, offset);
// And allocate the XBAT in the BAT
bat.setValueAt(1, POIFSConstants.DIFAT_SECTOR_BLOCK);
// Will go one place higher as XBAT added in
offset++;
// Chain it
if(_xbat_blocks.size() == 0) {
_header.setXBATStart(offset);
} else {
_xbat_blocks.get(_xbat_blocks.size()-1).setValueAt(
bigBlockSize.getXBATEntriesPerBlock(), offset
);
}
_xbat_blocks.add(xbat);
_header.setXBATCount(_xbat_blocks.size());
} else {
// Allocate our BAT in the existing XBAT with space
for(int i=0; i<bigBlockSize.getXBATEntriesPerBlock(); i++) {
if(xbat.getValueAt(i) == POIFSConstants.UNUSED_BLOCK) {
xbat.setValueAt(i, offset);
break;
}
}
}
} else {
// Store us in the header
int[] newBATs = new int[_header.getBATCount()+1];
System.arraycopy(_header.getBATArray(), 0, newBATs, 0, newBATs.length-1);
newBATs[newBATs.length-1] = offset;
_header.setBATArray(newBATs);
}
_header.setBATCount(_bat_blocks.size());
// The current offset stores us, but the next one is free
return offset+1;
}
protected long size() throws IOException {
return _data.size();
}
@Override
protected ChainLoopDetector getChainLoopDetector() throws IOException {
return new ChainLoopDetector(_data.size());
}

/**
* For unit testing only! Returns the underlying
* properties table
*/
NPropertyTable _get_property_table() {
return _property_table;
}
/**
* Returns the MiniStore, which performs a similar low
* level function to this, except for the small blocks.
*/
public NPOIFSMiniStore getMiniStore() {
return _mini_store;
}

/**
* add a new POIFSDocument to the FileSytem
*
* @param document the POIFSDocument being added
*/
void addDocument(final NPOIFSDocument document)
{
_property_table.addProperty(document.getDocumentProperty());
}

/**
* add a new DirectoryProperty to the FileSystem
*
* @param directory the DirectoryProperty being added
*/
void addDirectory(final DirectoryProperty directory)
{
_property_table.addProperty(directory);
}

/**
* Create a new document to be added to the root directory
*
* @param stream the InputStream from which the document's data
* will be obtained
* @param name the name of the new POIFSDocument
*
* @return the new DocumentEntry
*
* @exception IOException on error creating the new POIFSDocument
*/

public DocumentEntry createDocument(final InputStream stream,
final String name)
throws IOException
{
return getRoot().createDocument(name, stream);
}

/**
* create a new DocumentEntry in the root entry; the data will be
* provided later
*
* @param name the name of the new DocumentEntry
* @param size the size of the new DocumentEntry
* @param writer the writer of the new DocumentEntry
*
* @return the new DocumentEntry
*
* @exception IOException
*/
public DocumentEntry createDocument(final String name, final int size,
final POIFSWriterListener writer)
throws IOException
{
return getRoot().createDocument(name, size, writer);
}

/**
* create a new DirectoryEntry in the root directory
*
* @param name the name of the new DirectoryEntry
*
* @return the new DirectoryEntry
*
* @exception IOException on name duplication
*/

public DirectoryEntry createDirectory(final String name)
throws IOException
{
return getRoot().createDirectory(name);
}
/**
* Set the contents of a document in the root directory,
* creating if needed, otherwise updating
*
* @param stream the InputStream from which the document's data
* will be obtained
* @param name the name of the new or existing POIFSDocument
*
* @return the new or updated DocumentEntry
*
* @exception IOException on error populating the POIFSDocument
*/

public DocumentEntry createOrUpdateDocument(final InputStream stream,
final String name)
throws IOException
{
return getRoot().createOrUpdateDocument(name, stream);
}
/**
* Does the filesystem support an in-place write via
* {@link #writeFilesystem()} ? If false, only writing out to
* a brand new file via {@link #writeFilesystem(OutputStream)}
* is supported.
*/
public boolean isInPlaceWriteable() {
if(_data instanceof FileBackedDataSource) {
if ( ((FileBackedDataSource)_data).isWriteable() ) {
return true;
}
}
return false;
}
/**
* Write the filesystem out to the open file. Will throw an
* {@link IllegalArgumentException} if opened from an
* {@link InputStream}.
*
* @exception IOException thrown on errors writing to the stream
*/
public void writeFilesystem() throws IOException {
if(_data instanceof FileBackedDataSource) {
// Good, correct type
} else {
throw new IllegalArgumentException(
"POIFS opened from an inputstream, so writeFilesystem() may " +
"not be called. Use writeFilesystem(OutputStream) instead"
);
}
if (! ((FileBackedDataSource)_data).isWriteable()) {
throw new IllegalArgumentException(
"POIFS opened in read only mode, so writeFilesystem() may " +
"not be called. Open the FileSystem in read-write mode first"
);
}
syncWithDataSource();
}

/**
* Write the filesystem out
*
* @param stream the OutputStream to which the filesystem will be
* written
*
* @exception IOException thrown on errors writing to the stream
*/
public void writeFilesystem(final OutputStream stream) throws IOException {
// Have the datasource updated
syncWithDataSource();
// Now copy the contents to the stream
_data.copyTo(stream);
}
/**
* Have our in-memory objects write their state
* to their backing blocks
*/
private void syncWithDataSource() throws IOException {
// Mini Stream + SBATs first, as mini-stream details have
// to be stored in the Root Property
_mini_store.syncWithDataSource();
// Properties
NPOIFSStream propStream = new NPOIFSStream(this, _header.getPropertyStart());
_property_table.preWrite();
_property_table.write(propStream);
// _header.setPropertyStart has been updated on write ...
// HeaderBlock
HeaderBlockWriter hbw = new HeaderBlockWriter(_header);
hbw.writeBlock( getBlockAt(-1) );
// BATs
for(BATBlock bat : _bat_blocks) {
ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
BlockAllocationTableWriter.writeBlock(bat, block);
}
// XBats
for(BATBlock bat : _xbat_blocks) {
ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
BlockAllocationTableWriter.writeBlock(bat, block);
}
}
/**
* Closes the FileSystem, freeing any underlying files, streams
* and buffers. After this, you will be unable to read or
* write from the FileSystem.
*/
public void close() throws IOException {
_data.close();
}

/**
* read in a file and write it back out again
*
* @param args names of the files; arg[ 0 ] is the input file,
* arg[ 1 ] is the output file
*
* @exception IOException
*/
public static void main(String args[]) throws IOException {
if (args.length != 2) {
System.err.println(
"two arguments required: input filename and output filename");
System.exit(1);
}

try (FileInputStream istream = new FileInputStream(args[0])) {
try (FileOutputStream ostream = new FileOutputStream(args[1])) {
try (NPOIFSFileSystem fs = new NPOIFSFileSystem(istream)) {
fs.writeFilesystem(ostream);
}
}
}
}

/**
* Get the root entry
*
* @return the root entry
*/
public DirectoryNode getRoot() {
if (_root == null) {
_root = new DirectoryNode(_property_table.getRoot(), this, null);
}
return _root;
}

/**
* open a document in the root entry's list of entries
*
* @param documentName the name of the document to be opened
*
* @return a newly opened DocumentInputStream
*
* @exception IOException if the document does not exist or the
* name is that of a DirectoryEntry
*/
public DocumentInputStream createDocumentInputStream(
final String documentName) throws IOException {
return getRoot().createDocumentInputStream(documentName);
}

/**
* remove an entry
*
* @param entry to be removed
*/
void remove(EntryNode entry) throws IOException {
// If it's a document, free the blocks
if (entry instanceof DocumentEntry) {
NPOIFSDocument doc = new NPOIFSDocument((DocumentProperty)entry.getProperty(), this);
doc.free();
}
// Now zap it from the properties list
_property_table.removeProperty(entry.getProperty());
}
/* ********** START implementation of POIFSViewable ********** */

/**
* Get an array of objects, some of which may implement
* POIFSViewable
*
* @return an array of Object; may not be null, but may be empty
*/
public Object [] getViewableArray() {
if (preferArray()) {
return getRoot().getViewableArray();
}

return new Object[ 0 ];
}

/**
* Get an Iterator of objects, some of which may implement
* POIFSViewable
*
* @return an Iterator; may not be null, but may have an empty
* back end store
*/

public Iterator<Object> getViewableIterator() {
if (!preferArray()) {
return getRoot().getViewableIterator();
}

return Collections.emptyList().iterator();
}

/**
* Give viewers a hint as to whether to call getViewableArray or
* getViewableIterator
*
* @return true if a viewer should call getViewableArray, false if
* a viewer should call getViewableIterator
*/

public boolean preferArray() {
return getRoot().preferArray();
}

/**
* Provides a short description of the object, to be used when a
* POIFSViewable object has not provided its contents.
*
* @return short description
*/

public String getShortDescription() {
return "POIFS FileSystem";
}

/* ********** END implementation of POIFSViewable ********** */

/**
* @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
*/
public int getBigBlockSize() {
return bigBlockSize.getBigBlockSize();
}

/**
* @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
*/
public POIFSBigBlockSize getBigBlockSizeDetails() {
return bigBlockSize;
}

@Override
protected int getBlockStoreBlockSize() {
return getBigBlockSize();
}

@Internal
public NPropertyTable getPropertyTable() {
return _property_table;
}

@Internal
public HeaderBlock getHeaderBlock() {
return _header;
}
}


src/java/org/apache/poi/poifs/filesystem/NPOIFSDocument.java → src/java/org/apache/poi/poifs/filesystem/POIFSDocument.java View File

@@ -36,23 +36,23 @@ import org.apache.poi.util.IOUtils;

/**
* This class manages a document in the NIO POIFS filesystem.
* This is the {@link NPOIFSFileSystem} version.
* This is the {@link POIFSFileSystem} version.
*/
public final class NPOIFSDocument implements POIFSViewable, Iterable<ByteBuffer> {
public final class POIFSDocument implements POIFSViewable, Iterable<ByteBuffer> {

//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;

private DocumentProperty _property;

private NPOIFSFileSystem _filesystem;
private NPOIFSStream _stream;
private POIFSFileSystem _filesystem;
private POIFSStream _stream;
private int _block_size;
/**
* Constructor for an existing Document
*/
public NPOIFSDocument(DocumentNode document) {
public POIFSDocument(DocumentNode document) {
this((DocumentProperty)document.getProperty(),
((DirectoryNode)document.getParent()).getNFileSystem());
}
@@ -60,15 +60,15 @@ public final class NPOIFSDocument implements POIFSViewable, Iterable<ByteBuffer>
/**
* Constructor for an existing Document
*/
public NPOIFSDocument(DocumentProperty property, NPOIFSFileSystem filesystem) {
public POIFSDocument(DocumentProperty property, POIFSFileSystem filesystem) {
this._property = property;
this._filesystem = filesystem;

if(property.getSize() < POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
_stream = new NPOIFSStream(_filesystem.getMiniStore(), property.getStartBlock());
_stream = new POIFSStream(_filesystem.getMiniStore(), property.getStartBlock());
_block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
} else {
_stream = new NPOIFSStream(_filesystem, property.getStartBlock());
_stream = new POIFSStream(_filesystem, property.getStartBlock());
_block_size = _filesystem.getBlockStoreBlockSize();
}
}
@@ -79,7 +79,7 @@ public final class NPOIFSDocument implements POIFSViewable, Iterable<ByteBuffer>
* @param name the name of the POIFSDocument
* @param stream the InputStream we read data from
*/
public NPOIFSDocument(String name, NPOIFSFileSystem filesystem, InputStream stream)
public POIFSDocument(String name, POIFSFileSystem filesystem, InputStream stream)
throws IOException
{
this._filesystem = filesystem;
@@ -93,31 +93,29 @@ public final class NPOIFSDocument implements POIFSViewable, Iterable<ByteBuffer>
_property.setDocument(this);
}
public NPOIFSDocument(String name, int size, NPOIFSFileSystem filesystem, POIFSWriterListener writer)
public POIFSDocument(String name, final int size, POIFSFileSystem filesystem, POIFSWriterListener writer)
throws IOException
{
this._filesystem = filesystem;

if (size < POIFSConstants.BIG_BLOCK_MINIMUM_DOCUMENT_SIZE) {
_stream = new NPOIFSStream(filesystem.getMiniStore());
_stream = new POIFSStream(filesystem.getMiniStore());
_block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
} else {
_stream = new NPOIFSStream(filesystem);
_stream = new POIFSStream(filesystem);
_block_size = _filesystem.getBlockStoreBlockSize();
}
OutputStream innerOs = _stream.getOutputStream();
DocumentOutputStream os = new DocumentOutputStream(innerOs, size);
POIFSDocumentPath path = new POIFSDocumentPath(name.split("\\\\"));
String docName = path.getComponent(path.length()-1);
POIFSWriterEvent event = new POIFSWriterEvent(os, path, docName, size);
writer.processPOIFSWriterEvent(event);
innerOs.close();

// And build the property for it
this._property = new DocumentProperty(name, size);
_property.setStartBlock(_stream.getStartBlock());
_property.setDocument(this);

try (DocumentOutputStream os = new DocumentOutputStream(this, size)) {
POIFSDocumentPath path = new POIFSDocumentPath(name.split("\\\\"));
String docName = path.getComponent(path.length() - 1);
POIFSWriterEvent event = new POIFSWriterEvent(os, path, docName, size);
writer.processPOIFSWriterEvent(event);
}
}
/**
@@ -131,10 +129,10 @@ public final class NPOIFSDocument implements POIFSViewable, Iterable<ByteBuffer>
// Do we need to store as a mini stream or a full one?
long streamBlockSize = IOUtils.skipFully(bis, bigBlockSize);
if (streamBlockSize < bigBlockSize) {
_stream = new NPOIFSStream(_filesystem.getMiniStore());
_stream = new POIFSStream(_filesystem.getMiniStore());
_block_size = _filesystem.getMiniStore().getBlockStoreBlockSize();
} else {
_stream = new NPOIFSStream(_filesystem);
_stream = new POIFSStream(_filesystem);
_block_size = _filesystem.getBlockStoreBlockSize();
}

@@ -167,7 +165,7 @@ public final class NPOIFSDocument implements POIFSViewable, Iterable<ByteBuffer>
_property.setStartBlock(POIFSConstants.END_OF_CHAIN);
}
NPOIFSFileSystem getFileSystem()
POIFSFileSystem getFileSystem()
{
return _filesystem;
}

+ 880
- 54
src/java/org/apache/poi/poifs/filesystem/POIFSFileSystem.java View File

@@ -19,35 +19,235 @@

package org.apache.poi.poifs.filesystem;

import java.io.*;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.apache.poi.EmptyFileException;
import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.dev.POIFSViewable;
import org.apache.poi.util.CloseIgnoringInputStream;
import org.apache.poi.poifs.nio.ByteArrayBackedDataSource;
import org.apache.poi.poifs.nio.DataSource;
import org.apache.poi.poifs.nio.FileBackedDataSource;
import org.apache.poi.poifs.property.DirectoryProperty;
import org.apache.poi.poifs.property.DocumentProperty;
import org.apache.poi.poifs.property.PropertyTable;
import org.apache.poi.poifs.storage.BATBlock;
import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Internal;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

/**
* Transition class for the move from {@link POIFSFileSystem} to
* {@link OPOIFSFileSystem}, and from {@link NPOIFSFileSystem} to
* {@link POIFSFileSystem}.
* <p>This has been updated to be powered by the NIO-based NPOIFS
* {@link NPOIFSFileSystem}.
* <p>This is the main class of the POIFS system; it manages the entire
* life cycle of the filesystem.</p>
* <p>This is the new NIO version, which uses less memory</p>
*/
public class POIFSFileSystem
extends NPOIFSFileSystem // TODO Temporary workaround during #56791
implements POIFSViewable {

public class POIFSFileSystem extends BlockStore
implements POIFSViewable, Closeable
{
//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;

private static final POILogger LOG = POILogFactory.getLogger(POIFSFileSystem.class);

/**
* Convenience method for clients that want to avoid the auto-close behaviour of the constructor.
* Maximum size (in blocks) of the allocation table as supported by
* POI.<p>
*
* This constant has been chosen to help POI identify corrupted data in the
* header block (rather than crash immediately with {@link OutOfMemoryError}
* ). It's not clear if the compound document format actually specifies any
* upper limits. For files with 512 byte blocks, having an allocation table
* of 65,535 blocks would correspond to a total file size of 4GB. Needless
* to say, POI probably cannot handle files anywhere near that size.
*/
public static InputStream createNonClosingInputStream(InputStream is) {
return new CloseIgnoringInputStream(is);
}
private static final int MAX_BLOCK_COUNT = 65535;
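As a rough, illustrative check of that 4GB figure (not something this patch computes): a 512-byte BAT block holds 128 four-byte entries, each addressing one 512-byte block, so a full-size allocation table covers:

    // Back-of-the-envelope only; the literals mirror the constants above.
    long batBlocks = 65_535L;                  // MAX_BLOCK_COUNT
    long entriesPerBat = 512 / 4;              // 128 entries per 512-byte BAT block
    long addressable = batBlocks * entriesPerBat * 512;
    // addressable == 4_294_901_760 bytes, i.e. just under 4 GiB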

private POIFSMiniStore _mini_store;
private PropertyTable _property_table;
private List<BATBlock> _xbat_blocks;
private List<BATBlock> _bat_blocks;
private HeaderBlock _header;
private DirectoryNode _root;
private DataSource _data;
/**
* What big block size the file uses. Most files
* use 512 bytes, but a few use 4096
*/
private POIFSBigBlockSize bigBlockSize =
POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;

private POIFSFileSystem(boolean newFS)
{
_header = new HeaderBlock(bigBlockSize);
_property_table = new PropertyTable(_header);
_mini_store = new POIFSMiniStore(this, _property_table.getRoot(), new ArrayList<>(), _header);
_xbat_blocks = new ArrayList<>();
_bat_blocks = new ArrayList<>();
_root = null;
if(newFS) {
// Data needs to initially hold just the header block,
// a single bat block, and an empty properties section
_data = new ByteArrayBackedDataSource(IOUtils.safelyAllocate(
bigBlockSize.getBigBlockSize()*3, MAX_RECORD_LENGTH));
}
}
/**
* Constructor, intended for writing
*/
public POIFSFileSystem() {
super();
public POIFSFileSystem()
{
this(true);
// Reserve block 0 for the start of the Properties Table
// Create a single empty BAT and put that at offset 1
_header.setBATCount(1);
_header.setBATArray(new int[] { 1 });
BATBlock bb = BATBlock.createEmptyBATBlock(bigBlockSize, false);
bb.setOurBlockIndex(1);
_bat_blocks.add(bb);

setNextBlock(0, POIFSConstants.END_OF_CHAIN);
setNextBlock(1, POIFSConstants.FAT_SECTOR_BLOCK);

_property_table.setStartBlock(0);
}

/**
* <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
* creating from an <tt>InputStream</tt>. The File will be opened read-only</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying file closed, as the file is
* kept open during normal operation to read the data out.</p>
*
* @param file the File from which to read the data
*
* @exception IOException on errors reading, or on invalid data
*/
public POIFSFileSystem(File file)
throws IOException
{
this(file, true);
}
/**
* <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
* creating from an <tt>InputStream</tt>.</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying file closed, as the file is
* kept open during normal operation to read the data out.</p>
*
* @param file the File from which to read or read/write the data
* @param readOnly whether the POIFSFileSystem will only be used in read-only mode
*
* @exception IOException on errors reading, or on invalid data
*/
public POIFSFileSystem(File file, boolean readOnly)
throws IOException
{
this(null, file, readOnly, true);
}
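A minimal usage sketch of the File-based constructors (the path is an invented example): opening from a File avoids buffering the whole stream in memory, and passing readOnly=false keeps an in-place writeFilesystem() possible later.

    import java.io.File;
    import java.io.IOException;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class OpenPoifsFromFile {
        public static void main(String[] args) throws IOException {
            File f = new File("example.ole2");                         // hypothetical file
            try (POIFSFileSystem fs = new POIFSFileSystem(f, false)) { // false = read-write
                System.out.println("Entries in root: " + fs.getRoot().getEntryCount());
            }
        }
    }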
/**
* <p>Creates a POIFSFileSystem from an open <tt>FileChannel</tt>. This uses
* less memory than creating from an <tt>InputStream</tt>. The stream will
* be used in read-only mode.</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying Channel closed, as the channel is
* kept open during normal operation to read the data out.</p>
*
* @param channel the FileChannel from which to read the data
*
* @exception IOException on errors reading, or on invalid data
*/
public POIFSFileSystem(FileChannel channel)
throws IOException
{
this(channel, true);
}
/**
* <p>Creates a POIFSFileSystem from an open <tt>FileChannel</tt>. This uses
* less memory than creating from an <tt>InputStream</tt>.</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying Channel closed, as the channel is
* kept open during normal operation to read the data out.</p>
*
* @param channel the FileChannel from which to read or read/write the data
* @param readOnly whether the POIFSFileSystem will only be used in read-only mode
*
* @exception IOException on errors reading, or on invalid data
*/
public POIFSFileSystem(FileChannel channel, boolean readOnly)
throws IOException
{
this(channel, null, readOnly, false);
}
private POIFSFileSystem(FileChannel channel, File srcFile, boolean readOnly, boolean closeChannelOnError)
throws IOException
{
this(false);

try {
// Initialize the datasource
if (srcFile != null) {
if (srcFile.length() == 0)
throw new EmptyFileException();
FileBackedDataSource d = new FileBackedDataSource(srcFile, readOnly);
channel = d.getChannel();
_data = d;
} else {
_data = new FileBackedDataSource(channel, readOnly);
}
// Get the header
ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
IOUtils.readFully(channel, headerBuffer);
// Have the header processed
_header = new HeaderBlock(headerBuffer);
// Now process the various entries
readCoreContents();
} catch(IOException | RuntimeException e) {
// Comes from Iterators etc.
// TODO Decide if we can handle these better whilst
// still sticking to the iterator contract
if (closeChannelOnError && channel != null) {
channel.close();
}
throw e;
}
}
/**
* Create a POIFSFileSystem from an <tt>InputStream</tt>. Normally the stream is read until
* EOF. The stream is always closed.<p>
@@ -77,43 +277,647 @@ public class POIFSFileSystem
* @exception IOException on errors reading, or on invalid data
*/

public POIFSFileSystem(InputStream stream) throws IOException {
super(stream);
public POIFSFileSystem(InputStream stream)
throws IOException
{
this(false);

boolean success = false;
try (ReadableByteChannel channel = Channels.newChannel(stream)) {
// Turn our InputStream into something NIO based

// Get the header
ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
IOUtils.readFully(channel, headerBuffer);

// Have the header processed
_header = new HeaderBlock(headerBuffer);

// Sanity check the block count
sanityCheckBlockCount(_header.getBATCount());

// We need to buffer the whole file into memory when
// working with an InputStream.
// The max possible size is when each BAT block entry is used
long maxSize = BATBlock.calculateMaximumSize(_header);
if (maxSize > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Unable read a >2gb file via an InputStream");
}
ByteBuffer data = ByteBuffer.allocate((int) maxSize);

// Copy in the header
headerBuffer.position(0);
data.put(headerBuffer);
data.position(headerBuffer.capacity());

// Now read the rest of the stream
IOUtils.readFully(channel, data);
success = true;

// Turn it into a DataSource
_data = new ByteArrayBackedDataSource(data.array(), data.position());
} finally {
// As per the constructor contract, always close the stream
closeInputStream(stream, success);
}
// Now process the various entries
readCoreContents();
}
/**
* @param stream the stream to be closed
* @param success <code>false</code> if an exception is currently being thrown in the calling method
*/
private void closeInputStream(InputStream stream, boolean success) {
try {
stream.close();
} catch (IOException e) {
if(success) {
throw new RuntimeException(e);
}
// else not success? Try block did not complete normally
// just print stack trace and leave original ex to be thrown
LOG.log(POILogger.ERROR, "can't close input stream", e);
}
}

/**
* Read and process the PropertiesTable and the
* FAT / XFAT blocks, so that we're ready to
* work with the file
*/
private void readCoreContents() throws IOException {
// Grab the block size
bigBlockSize = _header.getBigBlockSize();
// Each block should only ever be used by one of the
// FAT, XFAT or Property Table. Ensure that is the case
ChainLoopDetector loopDetector = getChainLoopDetector();
// Read the FAT blocks
for(int fatAt : _header.getBATArray()) {
readBAT(fatAt, loopDetector);
}
// Work out how many FAT blocks remain in the XFATs
int remainingFATs = _header.getBATCount() - _header.getBATArray().length;
// Now read the XFAT blocks, and the FATs within them
BATBlock xfat;
int nextAt = _header.getXBATIndex();
for(int i=0; i<_header.getXBATCount(); i++) {
loopDetector.claim(nextAt);
ByteBuffer fatData = getBlockAt(nextAt);
xfat = BATBlock.createBATBlock(bigBlockSize, fatData);
xfat.setOurBlockIndex(nextAt);
nextAt = xfat.getValueAt(bigBlockSize.getXBATEntriesPerBlock());
_xbat_blocks.add(xfat);
// Process all the (used) FATs from this XFAT
int xbatFATs = Math.min(remainingFATs, bigBlockSize.getXBATEntriesPerBlock());
for(int j=0; j<xbatFATs; j++) {
int fatAt = xfat.getValueAt(j);
if(fatAt == POIFSConstants.UNUSED_BLOCK || fatAt == POIFSConstants.END_OF_CHAIN) break;
readBAT(fatAt, loopDetector);
}
remainingFATs -= xbatFATs;
}
// We're now able to load streams
// Use this to read in the properties
_property_table = new PropertyTable(_header, this);
// Finally read the Small Stream FAT (SBAT) blocks
BATBlock sfat;
List<BATBlock> sbats = new ArrayList<>();
_mini_store = new POIFSMiniStore(this, _property_table.getRoot(), sbats, _header);
nextAt = _header.getSBATStart();
for(int i=0; i<_header.getSBATCount() && nextAt != POIFSConstants.END_OF_CHAIN; i++) {
loopDetector.claim(nextAt);
ByteBuffer fatData = getBlockAt(nextAt);
sfat = BATBlock.createBATBlock(bigBlockSize, fatData);
sfat.setOurBlockIndex(nextAt);
sbats.add(sfat);
nextAt = getNextBlock(nextAt);
}
}
private void readBAT(int batAt, ChainLoopDetector loopDetector) throws IOException {
loopDetector.claim(batAt);
ByteBuffer fatData = getBlockAt(batAt);
BATBlock bat = BATBlock.createBATBlock(bigBlockSize, fatData);
bat.setOurBlockIndex(batAt);
_bat_blocks.add(bat);
}
private BATBlock createBAT(int offset, boolean isBAT) throws IOException {
// Create a new BATBlock
BATBlock newBAT = BATBlock.createEmptyBATBlock(bigBlockSize, !isBAT);
newBAT.setOurBlockIndex(offset);
// Ensure there's a spot in the file for it
ByteBuffer buffer = ByteBuffer.allocate(bigBlockSize.getBigBlockSize());
int writeTo = (1+offset) * bigBlockSize.getBigBlockSize(); // Header isn't in BATs
_data.write(buffer, writeTo);
// All done
return newBAT;
}
/**
* Load the block at the given offset.
*/
@Override
protected ByteBuffer getBlockAt(final int offset) throws IOException {
// The header block doesn't count, so add one
long blockWanted = offset + 1L;
long startAt = blockWanted * bigBlockSize.getBigBlockSize();
try {
return _data.read(bigBlockSize.getBigBlockSize(), startAt);
} catch (IndexOutOfBoundsException e) {
IndexOutOfBoundsException wrapped = new IndexOutOfBoundsException("Block " + offset + " not found");
wrapped.initCause(e);
throw wrapped;
}
}
/**
* Load the block at the given offset,
* extending the file if needed
*/
@Override
protected ByteBuffer createBlockIfNeeded(final int offset) throws IOException {
try {
return getBlockAt(offset);
} catch(IndexOutOfBoundsException e) {
// The header block doesn't count, so add one
long startAt = (offset+1L) * bigBlockSize.getBigBlockSize();
// Allocate and write
ByteBuffer buffer = ByteBuffer.allocate(getBigBlockSize());
_data.write(buffer, startAt);
// Retrieve the properly backed block
return getBlockAt(offset);
}
}
/**
* Returns the BATBlock that handles the specified offset,
* and the relative index within it
*/
@Override
protected BATBlockAndIndex getBATBlockAndIndex(final int offset) {
return BATBlock.getBATBlockAndIndex(
offset, _header, _bat_blocks
);
}
/**
* Works out what block follows the specified one.
*/
@Override
protected int getNextBlock(final int offset) {
BATBlockAndIndex bai = getBATBlockAndIndex(offset);
return bai.getBlock().getValueAt( bai.getIndex() );
}
/**
* Changes the record of what block follows the specified one.
*/
@Override
protected void setNextBlock(final int offset, final int nextBlock) {
BATBlockAndIndex bai = getBATBlockAndIndex(offset);
bai.getBlock().setValueAt(
bai.getIndex(), nextBlock
);
}
/**
* Finds a free block, and returns its offset.
* This method will extend the file if needed, and if doing
* so, allocate new FAT blocks to address the extra space.
*/
@Override
protected int getFreeBlock() throws IOException {
int numSectors = bigBlockSize.getBATEntriesPerBlock();

// First up, do we have any spare ones?
int offset = 0;
for (BATBlock bat : _bat_blocks) {
if(bat.hasFreeSectors()) {
// Claim one of them and return it
for(int j=0; j<numSectors; j++) {
int batValue = bat.getValueAt(j);
if(batValue == POIFSConstants.UNUSED_BLOCK) {
// Bingo
return offset + j;
}
}
}
// Move onto the next BAT
offset += numSectors;
}
// If we get here, then there aren't any free sectors
// in any of the BATs, so we need another BAT
BATBlock bat = createBAT(offset, true);
bat.setValueAt(0, POIFSConstants.FAT_SECTOR_BLOCK);
_bat_blocks.add(bat);
// Now store a reference to the BAT in the required place
if(_header.getBATCount() >= 109) {
// Needs to come from an XBAT
BATBlock xbat = null;
for(BATBlock x : _xbat_blocks) {
if(x.hasFreeSectors()) {
xbat = x;
break;
}
}
if(xbat == null) {
// Oh joy, we need a new XBAT too...
xbat = createBAT(offset+1, false);
// Allocate our new BAT as the first block in the XBAT
xbat.setValueAt(0, offset);
// And allocate the XBAT in the BAT
bat.setValueAt(1, POIFSConstants.DIFAT_SECTOR_BLOCK);
// Will go one place higher as XBAT added in
offset++;
// Chain it
if(_xbat_blocks.size() == 0) {
_header.setXBATStart(offset);
} else {
_xbat_blocks.get(_xbat_blocks.size()-1).setValueAt(
bigBlockSize.getXBATEntriesPerBlock(), offset
);
}
_xbat_blocks.add(xbat);
_header.setXBATCount(_xbat_blocks.size());
} else {
// Allocate our BAT in the existing XBAT with space
for(int i=0; i<bigBlockSize.getXBATEntriesPerBlock(); i++) {
if(xbat.getValueAt(i) == POIFSConstants.UNUSED_BLOCK) {
xbat.setValueAt(i, offset);
break;
}
}
}
} else {
// Store us in the header
int[] newBATs = new int[_header.getBATCount()+1];
System.arraycopy(_header.getBATArray(), 0, newBATs, 0, newBATs.length-1);
newBATs[newBATs.length-1] = offset;
_header.setBATArray(newBATs);
}
_header.setBATCount(_bat_blocks.size());
// The current offset stores us, but the next one is free
return offset+1;
}
protected long size() throws IOException {
return _data.size();
}
@Override
protected ChainLoopDetector getChainLoopDetector() throws IOException {
return new ChainLoopDetector(_data.size());
}

/**
* For unit testing only! Returns the underlying
* properties table
*/
PropertyTable _get_property_table() {
return _property_table;
}
/**
* <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
* creating from an <tt>InputStream</tt>.</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying file closed, as the file is
* kept open during normal operation to read the data out.</p>
* @param readOnly whether the POIFSFileSystem will only be used in read-only mode
*
* @param file the File from which to read the data
* Returns the MiniStore, which performs a similar low
* level function to this, except for the small blocks.
*/
POIFSMiniStore getMiniStore() {
return _mini_store;
}
/**
* add a new POIFSDocument to the FileSystem
*
* @exception IOException on errors reading, or on invalid data
* @param document the POIFSDocument being added
*/
void addDocument(final POIFSDocument document)
{
_property_table.addProperty(document.getDocumentProperty());
}

/**
* add a new DirectoryProperty to the FileSystem
*
* @param directory the DirectoryProperty being added
*/
public POIFSFileSystem(File file, boolean readOnly) throws IOException {
super(file, readOnly);
void addDirectory(final DirectoryProperty directory)
{
_property_table.addProperty(directory);
}

/**
* Create a new document to be added to the root directory
*
* @param stream the InputStream from which the document's data
* will be obtained
* @param name the name of the new POIFSDocument
*
* @return the new DocumentEntry
*
* @exception IOException on error creating the new POIFSDocument
*/

public DocumentEntry createDocument(final InputStream stream,
final String name)
throws IOException
{
return getRoot().createDocument(name, stream);
}
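For illustration only (document name and payload are made up; imports from java.io, java.nio.charset and org.apache.poi.poifs.filesystem are assumed), adding a stream-backed document to the root directory looks like:

    byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
    try (POIFSFileSystem fs = new POIFSFileSystem()) {
        fs.createDocument(new ByteArrayInputStream(payload), "Greeting");
        // persist it, e.g. fs.writeFilesystem(someOutputStream)
    }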

/**
* create a new DocumentEntry in the root entry; the data will be
* provided later
*
* @param name the name of the new DocumentEntry
* @param size the size of the new DocumentEntry
* @param writer the writer of the new DocumentEntry
*
* @return the new DocumentEntry
*
* @exception IOException if the writer exceeds the given size
*/
public DocumentEntry createDocument(final String name, final int size, final POIFSWriterListener writer)
throws IOException {
return getRoot().createDocument(name, size, writer);
}
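A sketch of the size-first variant (names invented): the listener is handed a DocumentOutputStream capped at the declared size, so writing more than size bytes fails.

    byte[] data = new byte[4096];                        // hypothetical fixed-size payload
    try (POIFSFileSystem fs = new POIFSFileSystem()) {
        fs.createDocument("FixedSizeDoc", data.length, event -> {
            try {
                event.getStream().write(data);           // must stay within the declared size
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
    }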

/**
* create a new DirectoryEntry in the root directory
*
* @param name the name of the new DirectoryEntry
*
* @return the new DirectoryEntry
*
* @exception IOException on name duplication
*/

public DirectoryEntry createDirectory(final String name)
throws IOException
{
return getRoot().createDirectory(name);
}
/**
* <p>Creates a POIFSFileSystem from a <tt>File</tt>. This uses less memory than
* creating from an <tt>InputStream</tt>. The File will be opened read-only</p>
*
* <p>Note that with this constructor, you will need to call {@link #close()}
* when you're done to have the underlying file closed, as the file is
* kept open during normal operation to read the data out.</p>
*
* @param file the File from which to read the data
* Set the contents of a document in the root directory,
* creating if needed, otherwise updating
*
* @exception IOException on errors reading, or on invalid data
* @param stream the InputStream from which the document's data
* will be obtained
* @param name the name of the new or existing POIFSDocument
*
* @return the new or updated DocumentEntry
*
* @exception IOException on error populating the POIFSDocument
*/
public POIFSFileSystem(File file) throws IOException {
super(file);
@SuppressWarnings("UnusedReturnValue")
public DocumentEntry createOrUpdateDocument(final InputStream stream, final String name)
throws IOException {
return getRoot().createOrUpdateDocument(name, stream);
}
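An illustrative create-or-update sketch (file and entry names assumed): the same call creates the entry the first time and replaces its contents on later calls.

    try (POIFSFileSystem fs = new POIFSFileSystem(new File("settings.ole2"), false)) {
        byte[] fresh = "v2".getBytes(StandardCharsets.UTF_8);
        fs.createOrUpdateDocument(new ByteArrayInputStream(fresh), "Config");
        fs.writeFilesystem();   // file-backed and writable, so the change is saved in place
    }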
/**
* Does the filesystem support an in-place write via
* {@link #writeFilesystem()} ? If false, only writing out to
* a brand new file via {@link #writeFilesystem(OutputStream)}
* is supported.
*/
public boolean isInPlaceWriteable() {
return (_data instanceof FileBackedDataSource) && ((FileBackedDataSource) _data).isWriteable();
}
/**
* Write the filesystem out to the open file. Will throw an
* {@link IllegalArgumentException} if opened from an
* {@link InputStream}.
*
* @exception IOException thrown on errors writing to the stream
*/
public void writeFilesystem() throws IOException {
if (!(_data instanceof FileBackedDataSource)) {
throw new IllegalArgumentException(
"POIFS opened from an inputstream, so writeFilesystem() may " +
"not be called. Use writeFilesystem(OutputStream) instead"
);
}
if (! ((FileBackedDataSource)_data).isWriteable()) {
throw new IllegalArgumentException(
"POIFS opened in read only mode, so writeFilesystem() may " +
"not be called. Open the FileSystem in read-write mode first"
);
}
syncWithDataSource();
}

/**
* Write the filesystem out
*
* @param stream the OutputStream to which the filesystem will be
* written
*
* @exception IOException thrown on errors writing to the stream
*/
public void writeFilesystem(final OutputStream stream) throws IOException {
// Have the datasource updated
syncWithDataSource();
// Now copy the contents to the stream
_data.copyTo(stream);
}
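Putting the two write paths together in a sketch (paths are examples, not from the patch): writeFilesystem() needs a writable, file-backed instance, while writeFilesystem(OutputStream) always produces a complete new copy.

    try (POIFSFileSystem fs = new POIFSFileSystem(new File("report.ole2"), false)) {
        // ... modify entries ...
        if (fs.isInPlaceWriteable()) {
            fs.writeFilesystem();                                  // save back into report.ole2
        } else {
            try (OutputStream out = new FileOutputStream("report-copy.ole2")) {
                fs.writeFilesystem(out);                           // write a fresh copy instead
            }
        }
    }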
/**
* Have our in-memory objects write their state
* to their backing blocks
*/
private void syncWithDataSource() throws IOException {
// Mini Stream + SBATs first, as mini-stream details have
// to be stored in the Root Property
_mini_store.syncWithDataSource();
// Properties
POIFSStream propStream = new POIFSStream(this, _header.getPropertyStart());
_property_table.preWrite();
_property_table.write(propStream);
// _header.setPropertyStart has been updated on write ...
// HeaderBlock
ByteArrayOutputStream baos = new ByteArrayOutputStream(
_header.getBigBlockSize().getBigBlockSize()
);
_header.writeData(baos);
getBlockAt(-1).put(baos.toByteArray());

// BATs
for(BATBlock bat : _bat_blocks) {
ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
bat.writeData(block);
}
// XBats
for(BATBlock bat : _xbat_blocks) {
ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
bat.writeData(block);
}
}
/**
* Closes the FileSystem, freeing any underlying files, streams
* and buffers. After this, you will be unable to read or
* write from the FileSystem.
*/
public void close() throws IOException {
_data.close();
}

/**
* read in a file and write it back out again
*
* @param args names of the files; arg[ 0 ] is the input file,
* arg[ 1 ] is the output file
*/
public static void main(String args[]) throws IOException {
if (args.length != 2) {
System.err.println(
"two arguments required: input filename and output filename");
System.exit(1);
}

try (FileInputStream istream = new FileInputStream(args[0])) {
try (FileOutputStream ostream = new FileOutputStream(args[1])) {
try (POIFSFileSystem fs = new POIFSFileSystem(istream)) {
fs.writeFilesystem(ostream);
}
}
}
}

/**
* Get the root entry
*
* @return the root entry
*/
public DirectoryNode getRoot() {
if (_root == null) {
_root = new DirectoryNode(_property_table.getRoot(), this, null);
}
return _root;
}

/**
* open a document in the root entry's list of entries
*
* @param documentName the name of the document to be opened
*
* @return a newly opened DocumentInputStream
*
* @exception IOException if the document does not exist or the
* name is that of a DirectoryEntry
*/
public DocumentInputStream createDocumentInputStream(
final String documentName) throws IOException {
return getRoot().createDocumentInputStream(documentName);
}
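Reading a document back out, as a sketch (the "Workbook" stream name is just the common .xls example, and IOUtils here means org.apache.poi.util.IOUtils):

    try (POIFSFileSystem fs = new POIFSFileSystem(new File("sheet.xls"), true);
         DocumentInputStream dis = fs.createDocumentInputStream("Workbook")) {
        byte[] content = IOUtils.toByteArray(dis);
        System.out.println("Workbook stream is " + content.length + " bytes");
    }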

/**
* remove an entry
*
* @param entry to be removed
*/
void remove(EntryNode entry) throws IOException {
// If it's a document, free the blocks
if (entry instanceof DocumentEntry) {
POIFSDocument doc = new POIFSDocument((DocumentProperty)entry.getProperty(), this);
doc.free();
}
// Now zap it from the properties list
_property_table.removeProperty(entry.getProperty());
}
/* ********** START implementation of POIFSViewable ********** */

/**
* Get an array of objects, some of which may implement
* POIFSViewable
*
* @return an array of Object; may not be null, but may be empty
*/
public Object [] getViewableArray() {
if (preferArray()) {
return getRoot().getViewableArray();
}

return new Object[ 0 ];
}

/**
* Get an Iterator of objects, some of which may implement
* POIFSViewable
*
* @return an Iterator; may not be null, but may have an empty
* back end store
*/

public Iterator<Object> getViewableIterator() {
if (!preferArray()) {
return getRoot().getViewableIterator();
}

return Collections.emptyList().iterator();
}

/**
* Give viewers a hint as to whether to call getViewableArray or
* getViewableIterator
*
* @return true if a viewer should call getViewableArray, false if
* a viewer should call getViewableIterator
*/

public boolean preferArray() {
return getRoot().preferArray();
}

/**
* Provides a short description of the object, to be used when a
* POIFSViewable object has not provided its contents.
*
* @return short description
*/

public String getShortDescription() {
return "POIFS FileSystem";
}

/* ********** END implementation of POIFSViewable ********** */

/**
* @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
*/
public int getBigBlockSize() {
return bigBlockSize.getBigBlockSize();
}

/**
* @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
*/
@SuppressWarnings("WeakerAccess")
public POIFSBigBlockSize getBigBlockSizeDetails() {
return bigBlockSize;
}

/**
* Creates a new {@link POIFSFileSystem} in a new {@link File}.
* Use {@link #POIFSFileSystem(File)} to open an existing File,
@@ -124,23 +928,45 @@ public class POIFSFileSystem
*/
public static POIFSFileSystem create(File file) throws IOException {
// Create a new empty POIFS in the file
try (POIFSFileSystem tmp = new POIFSFileSystem()) {
try (OutputStream out = new FileOutputStream(file)) {
tmp.writeFilesystem(out);
}
try (POIFSFileSystem tmp = new POIFSFileSystem();
OutputStream out = new FileOutputStream(file)) {
tmp.writeFilesystem(out);
}
// Open it up again backed by the file
return new POIFSFileSystem(file, false);
}
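Example use of create() (file name invented): it writes an empty filesystem to disk and reopens it file-backed in read-write mode, so a later writeFilesystem() saves in place.

    try (POIFSFileSystem fs = POIFSFileSystem.create(new File("fresh.ole2"))) {
        fs.createDocument(new ByteArrayInputStream(new byte[]{1, 2, 3}), "Sample");
        fs.writeFilesystem();
    }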

/**
* read in a file and write it back out again
*
* @param args names of the files; arg[ 0 ] is the input file,
* arg[ 1 ] is the output file
*/
public static void main(String args[]) throws IOException {
NPOIFSFileSystem.main(args);
@Override
protected int getBlockStoreBlockSize() {
return getBigBlockSize();
}
@Internal
public PropertyTable getPropertyTable() {
return _property_table;
}

@Internal
public HeaderBlock getHeaderBlock() {
return _header;
}


private static void sanityCheckBlockCount(int block_count) throws IOException {
if (block_count <= 0) {
throw new IOException(
"Illegal block count; minimum count is 1, got " +
block_count + " instead"
);
}
if (block_count > MAX_BLOCK_COUNT) {
throw new IOException(
"Block count " + block_count +
" is too high. POI maximum is " + MAX_BLOCK_COUNT + "."
);
}
}

}


src/java/org/apache/poi/poifs/filesystem/NPOIFSMiniStore.java → src/java/org/apache/poi/poifs/filesystem/POIFSMiniStore.java View File

@@ -28,36 +28,35 @@ import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.property.RootProperty;
import org.apache.poi.poifs.storage.BATBlock;
import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
import org.apache.poi.poifs.storage.BlockAllocationTableWriter;
import org.apache.poi.poifs.storage.HeaderBlock;

/**
* This class handles the MiniStream (small block store)
* in the NIO case for {@link NPOIFSFileSystem}
* in the NIO case for {@link POIFSFileSystem}
*/
public class NPOIFSMiniStore extends BlockStore
public class POIFSMiniStore extends BlockStore
{
private NPOIFSFileSystem _filesystem;
private NPOIFSStream _mini_stream;
private POIFSFileSystem _filesystem;
private POIFSStream _mini_stream;
private List<BATBlock> _sbat_blocks;
private HeaderBlock _header;
private RootProperty _root;

protected NPOIFSMiniStore(NPOIFSFileSystem filesystem, RootProperty root,
List<BATBlock> sbats, HeaderBlock header)
POIFSMiniStore(POIFSFileSystem filesystem, RootProperty root,
List<BATBlock> sbats, HeaderBlock header)
{
this._filesystem = filesystem;
this._sbat_blocks = sbats;
this._header = header;
this._root = root;
this._mini_stream = new NPOIFSStream(filesystem, root.getStartBlock());
this._mini_stream = new POIFSStream(filesystem, root.getStartBlock());
}
/**
* Load the block at the given offset.
*/
protected ByteBuffer getBlockAt(final int offset) throws IOException {
protected ByteBuffer getBlockAt(final int offset) {
// Which big block is this?
int byteOffset = offset * POIFSConstants.SMALL_BLOCK_SIZE;
int bigBlockNumber = byteOffset / _filesystem.getBigBlockSize();
@@ -109,7 +108,7 @@ public class NPOIFSMiniStore extends BlockStore
// If we are the first block to be allocated, initialise the stream
if (firstInStore) {
_filesystem._get_property_table().getRoot().setStartBlock(newBigBlock);
_mini_stream = new NPOIFSStream(_filesystem, newBigBlock);
_mini_stream = new POIFSStream(_filesystem, newBigBlock);
} else {
// Tack it onto the end of our chain
ChainLoopDetector loopDetector = _filesystem.getChainLoopDetector();
@@ -232,7 +231,7 @@ public class NPOIFSMiniStore extends BlockStore
}
@Override
protected ChainLoopDetector getChainLoopDetector() throws IOException {
protected ChainLoopDetector getChainLoopDetector() {
return new ChainLoopDetector( _root.getSize() );
}

@@ -245,12 +244,12 @@ public class NPOIFSMiniStore extends BlockStore
* the mini-stream size in the properties. Stream size is
* based on full blocks used, not the data within the streams
*/
protected void syncWithDataSource() throws IOException {
void syncWithDataSource() throws IOException {
int blocksUsed = 0;
for (BATBlock sbat : _sbat_blocks) {
ByteBuffer block = _filesystem.getBlockAt(sbat.getOurBlockIndex());
BlockAllocationTableWriter.writeBlock(sbat, block);
sbat.writeData(block);
if (!sbat.hasFreeSectors()) {
blocksUsed += _filesystem.getBigBlockSizeDetails().getBATEntriesPerBlock();
} else {

src/java/org/apache/poi/poifs/filesystem/NPOIFSStream.java → src/java/org/apache/poi/poifs/filesystem/POIFSStream.java View File

@@ -31,7 +31,7 @@ import org.apache.poi.poifs.storage.HeaderBlock;

/**
* This handles reading and writing a stream within a
* {@link NPOIFSFileSystem}. It can supply an iterator
* {@link POIFSFileSystem}. It can supply an iterator
* to read blocks, and a way to write out to existing and
* new blocks.
* Most users will want a higher level version of this,
@@ -44,7 +44,7 @@ import org.apache.poi.poifs.storage.HeaderBlock;
* TODO Implement a streaming write method, and append
*/

public class NPOIFSStream implements Iterable<ByteBuffer>
public class POIFSStream implements Iterable<ByteBuffer>
{
private BlockStore blockStore;
private int startBlock;
@@ -55,7 +55,7 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
* to know how to get the start block (eg from a
* {@link HeaderBlock} or a {@link Property})
*/
public NPOIFSStream(BlockStore blockStore, int startBlock) {
public POIFSStream(BlockStore blockStore, int startBlock) {
this.blockStore = blockStore;
this.startBlock = startBlock;
}
@@ -64,7 +64,7 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
* Constructor for a new stream. A start block won't
* be allocated until you begin writing to it.
*/
public NPOIFSStream(BlockStore blockStore) {
public POIFSStream(BlockStore blockStore) {
this.blockStore = blockStore;
this.startBlock = POIFSConstants.END_OF_CHAIN;
}
@@ -86,7 +86,7 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
return getBlockIterator();
}
public Iterator<ByteBuffer> getBlockIterator() {
Iterator<ByteBuffer> getBlockIterator() {
if(startBlock == POIFSConstants.END_OF_CHAIN) {
throw new IllegalStateException(
"Can't read from a new stream before it has been written to"
@@ -101,7 +101,7 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
* Note - if this is property based, you'll still
* need to update the size in the property yourself
*/
public void updateContents(byte[] contents) throws IOException {
void updateContents(byte[] contents) throws IOException {
OutputStream os = getOutputStream();
os.write(contents);
os.close();
@@ -143,7 +143,7 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
private ChainLoopDetector loopDetector;
private int nextBlock;
protected StreamBlockByteBufferIterator(int firstBlock) {
StreamBlockByteBufferIterator(int firstBlock) {
this.nextBlock = firstBlock;
try {
this.loopDetector = blockStore.getChainLoopDetector();
@@ -153,10 +153,7 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
}

public boolean hasNext() {
if(nextBlock == POIFSConstants.END_OF_CHAIN) {
return false;
}
return true;
return nextBlock != POIFSConstants.END_OF_CHAIN;
}

public ByteBuffer next() {
@@ -187,13 +184,13 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
ChainLoopDetector loopDetector;
int prevBlock, nextBlock;

protected StreamBlockByteBuffer() throws IOException {
StreamBlockByteBuffer() throws IOException {
loopDetector = blockStore.getChainLoopDetector();
prevBlock = POIFSConstants.END_OF_CHAIN;
nextBlock = startBlock;
}

protected void createBlockIfNeeded() throws IOException {
void createBlockIfNeeded() throws IOException {
if (buffer != null && buffer.hasRemaining()) return;
int thisBlock = nextBlock;
@@ -228,12 +225,14 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
// Update pointers
prevBlock = thisBlock;
}

@Override
public void write(int b) throws IOException {
oneByte[0] = (byte)(b & 0xFF);
write(oneByte);
}

@Override
public void write(byte[] b, int off, int len) throws IOException {
if ((off < 0) || (off > b.length) || (len < 0) ||
((off + len) > b.length) || ((off + len) < 0)) {
@@ -253,7 +252,7 @@ public class NPOIFSStream implements Iterable<ByteBuffer>
public void close() throws IOException {
// If we're overwriting, free any remaining blocks
NPOIFSStream toFree = new NPOIFSStream(blockStore, nextBlock);
POIFSStream toFree = new POIFSStream(blockStore, nextBlock);
toFree.free(loopDetector);
// Mark the end of the stream, if we have any data

+ 6
- 6
src/java/org/apache/poi/poifs/macros/VBAMacroReader.java View File

@@ -32,7 +32,7 @@ import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.FileMagic;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.poifs.macros.Module.ModuleType;
import org.apache.poi.util.CodePageUtil;
@@ -59,13 +59,13 @@ public class VBAMacroReader implements Closeable {
protected static final String VBA_PROJECT_OOXML = "vbaProject.bin";
protected static final String VBA_PROJECT_POIFS = "VBA";

private NPOIFSFileSystem fs;
private POIFSFileSystem fs;
public VBAMacroReader(InputStream rstream) throws IOException {
InputStream is = FileMagic.prepareToCheckMagic(rstream);
FileMagic fm = FileMagic.valueOf(is);
if (fm == FileMagic.OLE2) {
fs = new NPOIFSFileSystem(is);
fs = new POIFSFileSystem(is);
} else {
openOOXML(is);
}
@@ -73,12 +73,12 @@ public class VBAMacroReader implements Closeable {
public VBAMacroReader(File file) throws IOException {
try {
this.fs = new NPOIFSFileSystem(file);
this.fs = new POIFSFileSystem(file);
} catch (OfficeXmlFileException e) {
openOOXML(new FileInputStream(file));
}
}
public VBAMacroReader(NPOIFSFileSystem fs) {
public VBAMacroReader(POIFSFileSystem fs) {
this.fs = fs;
}
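A short sketch of the renamed types in use (file name assumed; readMacros(), which maps module names to their source, is expected to be available in this version but is not part of this diff):

    try (VBAMacroReader reader = new VBAMacroReader(new File("macros.doc"))) {
        Map<String, String> macros = reader.readMacros();
        macros.forEach((module, source) -> System.out.println(module + " (" + source.length() + " chars)"));
    }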
@@ -89,7 +89,7 @@ public class VBAMacroReader implements Closeable {
if (endsWithIgnoreCase(zipEntry.getName(), VBA_PROJECT_OOXML)) {
try {
// Make a NPOIFS from the contents, and close the stream
this.fs = new NPOIFSFileSystem(zis);
this.fs = new POIFSFileSystem(zis);
return;
} catch (IOException e) {
// Tidy up

+ 4
- 4
src/java/org/apache/poi/poifs/property/DocumentProperty.java View File

@@ -19,14 +19,14 @@

package org.apache.poi.poifs.property;

import org.apache.poi.poifs.filesystem.NPOIFSDocument;
import org.apache.poi.poifs.filesystem.POIFSDocument;

/**
* Trivial extension of Property for POIFSDocuments
*/
public class DocumentProperty extends Property {
// the POIFSDocument this property is associated with
private NPOIFSDocument _document;
private POIFSDocument _document;

/**
* Constructor
@@ -64,7 +64,7 @@ public class DocumentProperty extends Property {
*
* @param doc the associated POIFSDocument
*/
public void setDocument(NPOIFSDocument doc)
public void setDocument(POIFSDocument doc)
{
_document = doc;
}
@@ -74,7 +74,7 @@ public class DocumentProperty extends Property {
*
* @return the associated document
*/
public NPOIFSDocument getDocument()
public POIFSDocument getDocument()
{
return _document;
}

+ 0
- 164
src/java/org/apache/poi/poifs/property/NPropertyTable.java View File

@@ -1,164 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.property;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.NPOIFSStream;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

/**
* This class embodies the Property Table for a {@link NPOIFSFileSystem};
* this is basically the directory for all of the documents in the
* filesystem.
*/
public final class NPropertyTable extends PropertyTableBase {
private static final POILogger _logger =
POILogFactory.getLogger(NPropertyTable.class);
//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;

private POIFSBigBlockSize _bigBigBlockSize;

public NPropertyTable(HeaderBlock headerBlock)
{
super(headerBlock);
_bigBigBlockSize = headerBlock.getBigBlockSize();
}

/**
* reading constructor (used when we've read in a file and we want
* to extract the property table from it). Populates the
* properties thoroughly
*
* @param headerBlock the header block of the file
* @param filesystem the filesystem to read from
*
* @exception IOException if anything goes wrong (which should be
* a result of the input being NFG)
*/
public NPropertyTable(final HeaderBlock headerBlock,
final NPOIFSFileSystem filesystem)
throws IOException
{
super(
headerBlock,
buildProperties(
(new NPOIFSStream(filesystem, headerBlock.getPropertyStart())).iterator(),
headerBlock.getBigBlockSize()
)
);
_bigBigBlockSize = headerBlock.getBigBlockSize();
}
private static List<Property> buildProperties(final Iterator<ByteBuffer> dataSource,
final POIFSBigBlockSize bigBlockSize) throws IOException
{
List<Property> properties = new ArrayList<>();
while(dataSource.hasNext()) {
ByteBuffer bb = dataSource.next();
// Turn it into an array
byte[] data;
if(bb.hasArray() && bb.arrayOffset() == 0 &&
bb.array().length == bigBlockSize.getBigBlockSize()) {
data = bb.array();
} else {
data = IOUtils.safelyAllocate(bigBlockSize.getBigBlockSize(), MAX_RECORD_LENGTH);
int toRead = data.length;
if (bb.remaining() < bigBlockSize.getBigBlockSize()) {
// Looks to be a truncated block
// This isn't allowed, but some third party created files
// sometimes do this, and we can normally read anyway
_logger.log(POILogger.WARN, "Short Property Block, ", bb.remaining(),
" bytes instead of the expected " + bigBlockSize.getBigBlockSize());
toRead = bb.remaining();
}
bb.get(data, 0, toRead);
}
PropertyFactory.convertToProperties(data, properties);
}
return properties;
}

/**
* Return the number of BigBlock's this instance uses
*
* @return count of BigBlock instances
*/
public int countBlocks()
{
long rawSize = _properties.size() * (long)POIFSConstants.PROPERTY_SIZE;
int blkSize = _bigBigBlockSize.getBigBlockSize();
int numBlocks = (int)(rawSize / blkSize);
if ((rawSize % blkSize) != 0) {
numBlocks++;
}
return numBlocks;
}
/**
* Prepare to be written
*/
public void preWrite() {
List<Property> pList = new ArrayList<>();
// give each property its index
int i=0;
for (Property p : _properties) {
// only handle non-null properties
if (p == null) continue;
p.setIndex(i++);
pList.add(p);
}

// prepare each property for writing
for (Property p : pList) p.preWrite();
}
/**
* Writes the properties out into the given low-level stream
*/
public void write(NPOIFSStream stream) throws IOException {
OutputStream os = stream.getOutputStream();
for(Property property : _properties) {
if(property != null) {
property.writeData(os);
}
}
os.close();
// Update the start position if needed
if(getStartBlock() != stream.getStartBlock()) {
setStartBlock(stream.getStartBlock());
}
}
}

+ 3
- 30
src/java/org/apache/poi/poifs/property/PropertyFactory.java View File

@@ -19,12 +19,9 @@

package org.apache.poi.poifs.property;

import java.io.IOException;

import java.util.*;
import java.util.List;

import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.storage.ListManagedBlock;

/**
* Factory for turning an array of RawDataBlock instances containing
@@ -38,37 +35,13 @@ import org.apache.poi.poifs.storage.ListManagedBlock;
* @author Marc Johnson (mjohnson at apache dot org)
*/

class PropertyFactory {
final class PropertyFactory {
// no need for an accessible constructor
private PropertyFactory()
{
}

/**
* Convert raw data blocks to an array of Property's
*
* @param blocks to be converted
*
* @return the converted List of Property objects. May contain
* nulls, but will not be null
*
* @exception IOException if any of the blocks are empty
*/
static List<Property> convertToProperties(ListManagedBlock [] blocks)
throws IOException
{
List<Property> properties = new ArrayList<>();

for (ListManagedBlock block : blocks) {
byte[] data = block.getData();
convertToProperties(data, properties);
}
return properties;
}
static void convertToProperties(byte[] data, List<Property> properties)
throws IOException
{
static void convertToProperties(byte[] data, List<Property> properties) {
int property_count = data.length / POIFSConstants.PROPERTY_SIZE;
int offset = 0;


+ 190
- 57
src/java/org/apache/poi/poifs/property/PropertyTable.java View File

@@ -19,29 +19,43 @@ package org.apache.poi.poifs.property;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.storage.BlockWritable;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.filesystem.BATManaged;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSStream;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.poifs.storage.PropertyBlock;
import org.apache.poi.poifs.storage.RawDataBlockList;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

/**
* This class embodies the Property Table for the {@link org.apache.poi.poifs.filesystem.POIFSFileSystem};
* this is basically the directory for all of the documents in the
* filesystem.
*
* @author Marc Johnson (mjohnson at apache dot org)
* This class embodies the Property Table for a {@link POIFSFileSystem};
* this is basically the directory for all of the documents in the
* filesystem and looks up entries in the filesystem to their
* chain of blocks.
*/
public final class PropertyTable extends PropertyTableBase implements BlockWritable {
private POIFSBigBlockSize _bigBigBlockSize;
private BlockWritable[] _blocks;
public final class PropertyTable implements BATManaged {
private static final POILogger _logger =
POILogFactory.getLogger(PropertyTable.class);

//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;

private final HeaderBlock _header_block;
private final List<Property> _properties = new ArrayList<>();
private final POIFSBigBlockSize _bigBigBlockSize;

public PropertyTable(HeaderBlock headerBlock)
{
super(headerBlock);
_header_block = headerBlock;
_bigBigBlockSize = headerBlock.getBigBlockSize();
_blocks = null;
addProperty(new RootProperty());
}

/**
@@ -50,75 +64,194 @@ public final class PropertyTable extends PropertyTableBase implements BlockWrita
* properties thoroughly
*
* @param headerBlock the header block of the file
* @param blockList the list of blocks
* @param filesystem the filesystem to read from
*
* @exception IOException if anything goes wrong (which should be
* a result of the input being NFG)
*/
public PropertyTable(final HeaderBlock headerBlock,
final RawDataBlockList blockList)
throws IOException
{
super(
public PropertyTable(final HeaderBlock headerBlock, final POIFSFileSystem filesystem)
throws IOException {
this(
headerBlock,
PropertyFactory.convertToProperties(
blockList.fetchBlocks(headerBlock.getPropertyStart(), -1)
)
new POIFSStream(filesystem, headerBlock.getPropertyStart())
);
}

/* only invoked locally and from the junit tests */
PropertyTable(final HeaderBlock headerBlock, final Iterable<ByteBuffer> dataSource)
throws IOException {
_header_block = headerBlock;
_bigBigBlockSize = headerBlock.getBigBlockSize();
_blocks = null;

for (ByteBuffer bb : dataSource) {
// Turn it into an array
byte[] data;
if (bb.hasArray() && bb.arrayOffset() == 0 &&
bb.array().length == _bigBigBlockSize.getBigBlockSize()) {
data = bb.array();
} else {
data = IOUtils.safelyAllocate(_bigBigBlockSize.getBigBlockSize(), MAX_RECORD_LENGTH);

int toRead = data.length;
if (bb.remaining() < _bigBigBlockSize.getBigBlockSize()) {
// Looks to be a truncated block
// This isn't allowed, but some third party created files
// sometimes do this, and we can normally read anyway
_logger.log(POILogger.WARN, "Short Property Block, ", bb.remaining(),
" bytes instead of the expected " + _bigBigBlockSize.getBigBlockSize());
toRead = bb.remaining();
}

bb.get(data, 0, toRead);
}

PropertyFactory.convertToProperties(data, _properties);
}

populatePropertyTree( (DirectoryProperty)_properties.get(0));
}


/**
* Prepare to be written
* Add a property to the list of properties we manage
*
* @param property the new Property to manage
*/
public void preWrite()
{
Property[] properties = _properties.toArray(new Property[_properties.size()]);
public void addProperty(Property property) {
_properties.add(property);
}

// give each property its index
for (int k = 0; k < properties.length; k++)
{
properties[ k ].setIndex(k);
}
/**
* Remove a property from the list of properties we manage
*
* @param property the Property to be removed
*/
public void removeProperty(final Property property) {
_properties.remove(property);
}

// allocate the blocks for the property table
_blocks = PropertyBlock.createPropertyBlockArray(_bigBigBlockSize, _properties);
/**
* Get the root property
*
* @return the root property
*/
public RootProperty getRoot() {
// it's always the first element in the List
return ( RootProperty ) _properties.get(0);
}

// prepare each property for writing
for (Property property : properties) {
property.preWrite();
}
/**
* Get the start block for the property table
*
* @return start block index
*/
public int getStartBlock() {
return _header_block.getPropertyStart();
}

/**
* Set the start block for this instance
*
* @param index index into the array of BigBlock instances making
* up the filesystem
*/
public void setStartBlock(final int index) {
_header_block.setPropertyStart(index);
}



/**
* Return the number of BigBlocks this instance uses
*
* @return count of BigBlock instances
*/
public int countBlocks()
{
return (_blocks == null) ? 0
: _blocks.length;
public int countBlocks() {
long rawSize = _properties.size() * (long)POIFSConstants.PROPERTY_SIZE;
int blkSize = _bigBigBlockSize.getBigBlockSize();
int numBlocks = (int)(rawSize / blkSize);
if ((rawSize % blkSize) != 0) {
numBlocks++;
}
return numBlocks;
}
/**
* Prepare to be written
*/
public void preWrite() {
List<Property> pList = new ArrayList<>();
// give each property its index
int i=0;
for (Property p : _properties) {
// only handle non-null properties
if (p == null) continue;
p.setIndex(i++);
pList.add(p);
}

// prepare each property for writing
for (Property p : pList) p.preWrite();
}
/**
* Write the storage to an OutputStream
*
* @param stream the OutputStream to which the stored data should
* be written
*
* @exception IOException on problems writing to the specified
* stream
* Writes the properties out into the given low-level stream
*/
public void writeBlocks(final OutputStream stream)
throws IOException
{
if (_blocks != null)
{
for (BlockWritable _block : _blocks) {
_block.writeBlocks(stream);
public void write(POIFSStream stream) throws IOException {
OutputStream os = stream.getOutputStream();
for(Property property : _properties) {
if(property != null) {
property.writeData(os);
}
}
os.close();
// Update the start position if needed
if(getStartBlock() != stream.getStartBlock()) {
setStartBlock(stream.getStartBlock());
}
}

private void populatePropertyTree(DirectoryProperty root) throws IOException {
int index = root.getChildIndex();

if (!Property.isValidIndex(index)) {
// property has no children
return;
}

final Stack<Property> children = new Stack<>();
children.push(_properties.get(index));
while (!children.empty()) {
Property property = children.pop();
if (property == null) {
// unknown / unsupported / corrupted property, skip
continue;
}

root.addChild(property);
if (property.isDirectory()) {
populatePropertyTree(( DirectoryProperty ) property);
}
index = property.getPreviousChildIndex();
if (isValidIndex(index)) {
children.push(_properties.get(index));
}
index = property.getNextChildIndex();
if (isValidIndex(index)) {
children.push(_properties.get(index));
}
}
}

private boolean isValidIndex(int index) {
if (! Property.isValidIndex(index))
return false;
if (index < 0 || index >= _properties.size()) {
_logger.log(POILogger.WARN, "Property index " + index +
"outside the valid range 0.."+_properties.size());
return false;
}
return true;
}
}
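PropertyTable above now absorbs what PropertyTableBase used to provide (its removal follows below), and with the N-prefixed filesystem classes gone, callers only ever see the POIFS* names. A rough usage sketch against the renamed API; the file name is a placeholder and the snippet is illustrative, not code from this patch:

import java.io.File;
import java.util.Iterator;

import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class ListRootEntries {
    public static void main(String[] args) throws Exception {
        // Open an existing OLE2 file read-only using the renamed POIFSFileSystem
        try (POIFSFileSystem fs = new POIFSFileSystem(new File("input.xls"), true)) {
            DirectoryNode root = fs.getRoot();          // backed by the PropertyTable's RootProperty
            Iterator<Entry> entries = root.getEntries();
            while (entries.hasNext()) {
                System.out.println(entries.next().getName());
            }
        }
    }
}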

+ 0
- 174
src/java/org/apache/poi/poifs/property/PropertyTableBase.java View File

@@ -1,174 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.property;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

import org.apache.poi.poifs.filesystem.BATManaged;
import org.apache.poi.poifs.storage.HeaderBlock;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

/**
* This class embodies the Property Table for the filesystem,
* which looks up entries in the filesystem to their
* chain of blocks.
* This is the core support, there are implementations
* for the different block schemes as needed.
*/
public abstract class PropertyTableBase implements BATManaged {
private static final POILogger _logger =
POILogFactory.getLogger(PropertyTableBase.class);
private final HeaderBlock _header_block;
protected final List<Property> _properties;

public PropertyTableBase(final HeaderBlock header_block)
{
_header_block = header_block;
_properties = new ArrayList<>();
addProperty(new RootProperty());
}

/**
* Reading constructor (used when we've read in a file and we want
* to extract the property table from it). Populates the
* properties thoroughly
*
* @param header_block the first block to read from
* @param properties the list to populate
*
* @exception IOException if anything goes wrong (which should be
* a result of the input being NFG)
*/
public PropertyTableBase(final HeaderBlock header_block,
final List<Property> properties)
throws IOException
{
_header_block = header_block;
_properties = properties;
populatePropertyTree( (DirectoryProperty)_properties.get(0));
}

/**
* Add a property to the list of properties we manage
*
* @param property the new Property to manage
*/
public void addProperty(Property property)
{
_properties.add(property);
}

/**
* Remove a property from the list of properties we manage
*
* @param property the Property to be removed
*/
public void removeProperty(final Property property)
{
_properties.remove(property);
}

/**
* Get the root property
*
* @return the root property
*/
public RootProperty getRoot()
{
// it's always the first element in the List
return ( RootProperty ) _properties.get(0);
}
private void populatePropertyTree(DirectoryProperty root)
throws IOException
{
int index = root.getChildIndex();

if (!Property.isValidIndex(index))
{

// property has no children
return;
}
Stack<Property> children = new Stack<>();

children.push(_properties.get(index));
while (!children.empty())
{
Property property = children.pop();
if (property == null)
{
// unknown / unsupported / corrupted property, skip
continue;
}

root.addChild(property);
if (property.isDirectory())
{
populatePropertyTree(( DirectoryProperty ) property);
}
index = property.getPreviousChildIndex();
if (isValidIndex(index))
{
children.push(_properties.get(index));
}
index = property.getNextChildIndex();
if (isValidIndex(index))
{
children.push(_properties.get(index));
}
}
}
protected boolean isValidIndex(int index) {
if (! Property.isValidIndex(index))
return false;
if (index < 0 || index >= _properties.size()) {
_logger.log(POILogger.WARN, "Property index " + index +
"outside the valid range 0.."+_properties.size());
return false;
}
return true;
}

/**
* Get the start block for the property table
*
* @return start block index
*/
public int getStartBlock()
{
return _header_block.getPropertyStart();
}

/**
* Set the start block for this instance
*
* @param index index into the array of BigBlock instances making
* up the filesystem
*/
public void setStartBlock(final int index)
{
_header_block.setPropertyStart(index);
}
}

+ 31
- 167
src/java/org/apache/poi/poifs/storage/BATBlock.java View File

@@ -31,7 +31,13 @@ import org.apache.poi.util.LittleEndian;
* A block of block allocation table entries. BATBlocks are created
* only through a static factory method: createBATBlocks.
*/
public final class BATBlock extends BigBlock {
public final class BATBlock implements BlockWritable {
/**
* Either 512 bytes ({@link POIFSConstants#SMALLER_BIG_BLOCK_SIZE})
* or 4096 bytes ({@link POIFSConstants#LARGER_BIG_BLOCK_SIZE})
*/
private POIFSBigBlockSize bigBlockSize;

/**
* For a regular fat block, these are 128 / 1024
* next sector values.
@@ -55,7 +61,7 @@ public final class BATBlock extends BigBlock {
*/
private BATBlock(POIFSBigBlockSize bigBlockSize)
{
super(bigBlockSize);
this.bigBlockSize = bigBlockSize;
int _entries_per_block = bigBlockSize.getBATEntriesPerBlock();
_values = new int[_entries_per_block];
@@ -64,39 +70,14 @@ public final class BATBlock extends BigBlock {
Arrays.fill(_values, POIFSConstants.UNUSED_BLOCK);
}

/**
* Create a single instance initialized (perhaps partially) with entries
*
* @param entries the array of block allocation table entries
* @param start_index the index of the first entry to be written
* to the block
* @param end_index the index, plus one, of the last entry to be
* written to the block (writing is for all index
* k, start_index <= k < end_index)
*/

private BATBlock(POIFSBigBlockSize bigBlockSize, final int [] entries,
final int start_index, final int end_index)
{
this(bigBlockSize);
for (int k = start_index; k < end_index; k++) {
_values[k - start_index] = entries[k];
}
// Do we have any free sectors?
if(end_index - start_index == _values.length) {
recomputeFree();
}
}
private void recomputeFree() {
boolean hasFree = false;
for(int k=0; k<_values.length; k++) {
if(_values[k] == POIFSConstants.UNUSED_BLOCK) {
hasFree = true;
break;
}
}
for (int _value : _values) {
if (_value == POIFSConstants.UNUSED_BLOCK) {
hasFree = true;
break;
}
}
_has_free_sectors = hasFree;
}

@@ -127,108 +108,12 @@ public final class BATBlock extends BigBlock {
public static BATBlock createEmptyBATBlock(final POIFSBigBlockSize bigBlockSize, boolean isXBAT) {
BATBlock block = new BATBlock(bigBlockSize);
if(isXBAT) {
block.setXBATChain(bigBlockSize, POIFSConstants.END_OF_CHAIN);
final int _entries_per_xbat_block = bigBlockSize.getXBATEntriesPerBlock();
block._values[ _entries_per_xbat_block ] = POIFSConstants.END_OF_CHAIN;
}
return block;
}

/**
* Create an array of BATBlocks from an array of int block
* allocation table entries
*
* @param entries the array of int entries
*
* @return the newly created array of BATBlocks
*/
public static BATBlock [] createBATBlocks(final POIFSBigBlockSize bigBlockSize, final int [] entries)
{
int block_count = calculateStorageRequirements(bigBlockSize, entries.length);
BATBlock[] blocks = new BATBlock[ block_count ];
int index = 0;
int remaining = entries.length;

int _entries_per_block = bigBlockSize.getBATEntriesPerBlock();
for (int j = 0; j < entries.length; j += _entries_per_block)
{
blocks[ index++ ] = new BATBlock(bigBlockSize, entries, j,
(remaining > _entries_per_block)
? j + _entries_per_block
: entries.length);
remaining -= _entries_per_block;
}
return blocks;
}
/**
* Create an array of XBATBlocks from an array of int block
* allocation table entries
*
* @param entries the array of int entries
* @param startBlock the start block of the array of XBAT blocks
*
* @return the newly created array of BATBlocks
*/

public static BATBlock [] createXBATBlocks(final POIFSBigBlockSize bigBlockSize,
final int [] entries,
final int startBlock)
{
int block_count =
calculateXBATStorageRequirements(bigBlockSize, entries.length);
BATBlock[] blocks = new BATBlock[ block_count ];
int index = 0;
int remaining = entries.length;

int _entries_per_xbat_block = bigBlockSize.getXBATEntriesPerBlock();
if (block_count != 0)
{
for (int j = 0; j < entries.length; j += _entries_per_xbat_block)
{
blocks[ index++ ] =
new BATBlock(bigBlockSize, entries, j,
(remaining > _entries_per_xbat_block)
? j + _entries_per_xbat_block
: entries.length);
remaining -= _entries_per_xbat_block;
}
for (index = 0; index < blocks.length - 1; index++)
{
blocks[ index ].setXBATChain(bigBlockSize, startBlock + index + 1);
}
blocks[ index ].setXBATChain(bigBlockSize, POIFSConstants.END_OF_CHAIN);
}
return blocks;
}

/**
* Calculate how many BATBlocks are needed to hold a specified
* number of BAT entries.
*
* @param entryCount the number of entries
*
* @return the number of BATBlocks needed
*/
public static int calculateStorageRequirements(final POIFSBigBlockSize bigBlockSize, final int entryCount)
{
int _entries_per_block = bigBlockSize.getBATEntriesPerBlock();
return (entryCount + _entries_per_block - 1) / _entries_per_block;
}

/**
* Calculate how many XBATBlocks are needed to hold a specified
* number of BAT entries.
*
* @param entryCount the number of entries
*
* @return the number of XBATBlocks needed
*/
public static int calculateXBATStorageRequirements(final POIFSBigBlockSize bigBlockSize, final int entryCount)
{
int _entries_per_xbat_block = bigBlockSize.getXBATEntriesPerBlock();
return (entryCount + _entries_per_xbat_block - 1)
/ _entries_per_xbat_block;
}
/**
* Calculates the maximum size of a file which is addressable given the
* number of FAT (BAT) sectors specified. (We don't care if those BAT
@@ -280,19 +165,7 @@ public final class BATBlock extends BigBlock {
*/
public static BATBlockAndIndex getSBATBlockAndIndex(final int offset,
final HeaderBlock header, final List<BATBlock> sbats) {
POIFSBigBlockSize bigBlockSize = header.getBigBlockSize();
int entriesPerBlock = bigBlockSize.getBATEntriesPerBlock();
// SBATs are so much easier, as they're chained streams
int whichSBAT = offset / entriesPerBlock;
int index = offset % entriesPerBlock;
return new BATBlockAndIndex( index, sbats.get(whichSBAT) );
}
private void setXBATChain(final POIFSBigBlockSize bigBlockSize, int chainIndex)
{
int _entries_per_xbat_block = bigBlockSize.getXBATEntriesPerBlock();
_values[ _entries_per_xbat_block ] = chainIndex;
return getBATBlockAndIndex(offset, header, sbats);
}
/**
@@ -354,10 +227,7 @@ public final class BATBlock extends BigBlock {
return ourBlockIndex;
}


/* ********** START extension of BigBlock ********** */

/**
/**
* Write the block's data to an OutputStream
*
* @param stream the OutputStream to which the stored data should
@@ -366,16 +236,13 @@ public final class BATBlock extends BigBlock {
* @exception IOException on problems writing to the specified
* stream
*/
void writeData(final OutputStream stream)
throws IOException
{
// Save it out
stream.write( serialize() );

public void writeBlocks(final OutputStream stream) throws IOException {
// Save it out
stream.write( serialize() );
}
void writeData(final ByteBuffer block)
throws IOException
{

public void writeData(final ByteBuffer block) {
// Save it out
block.put( serialize() );
}
@@ -384,21 +251,18 @@ public final class BATBlock extends BigBlock {
// Create the empty array
byte[] data = new byte[ bigBlockSize.getBigBlockSize() ];
// Fill in the values
int offset = 0;
for(int i=0; i<_values.length; i++) {
LittleEndian.putInt(data, offset, _values[i]);
offset += LittleEndian.INT_SIZE;
}
// Fill in the values
int offset = 0;
for (int _value : _values) {
LittleEndian.putInt(data, offset, _value);
offset += LittleEndian.INT_SIZE;
}
// Done
return data;
}

/* ********** END extension of BigBlock ********** */
public static class BATBlockAndIndex {
public static final class BATBlockAndIndex {
private final int index;
private final BATBlock block;
private BATBlockAndIndex(int index, BATBlock block) {
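The getBATBlockAndIndex()/getSBATBlockAndIndex() hunks above reduce the lookup to a division and a remainder: with 512-byte big blocks a BAT sector holds 128 int entries, so a block offset maps to sector offset/128, entry offset%128. A standalone sketch of just that arithmetic, using a hypothetical offset rather than anything from the patch:

public class BATLookupDemo {
    public static void main(String[] args) {
        int entriesPerBlock = 128;                 // 512-byte big blocks: 128 ints per BAT sector
        int offset = 300;                          // hypothetical block index to look up
        int whichBAT = offset / entriesPerBlock;   // 2 -> the third BAT sector
        int index = offset % entriesPerBlock;      // 44 -> entry within that sector
        System.out.println(whichBAT + " / " + index);
    }
}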

+ 0
- 103
src/java/org/apache/poi/poifs/storage/BigBlock.java View File

@@ -1,103 +0,0 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

/**
* Abstract base class of all POIFS block storage classes. All
* extensions of BigBlock should write 512 or 4096 bytes of data when
* requested to write their data (as per their BigBlockSize).
*
* This class has package scope, as there is no reason at this time to
* make the class public.
*
* @author Marc Johnson (mjohnson at apache dot org)
*/

import java.io.IOException;
import java.io.OutputStream;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;

abstract class BigBlock
implements BlockWritable
{
/**
* Either 512 bytes ({@link POIFSConstants#SMALLER_BIG_BLOCK_SIZE})
* or 4096 bytes ({@link POIFSConstants#LARGER_BIG_BLOCK_SIZE})
*/
protected POIFSBigBlockSize bigBlockSize;
protected BigBlock(POIFSBigBlockSize bigBlockSize) {
this.bigBlockSize = bigBlockSize;
}

/**
* Default implementation of write for extending classes that
* contain their data in a simple array of bytes.
*
* @param stream the OutputStream to which the data should be
* written.
* @param data the byte array of to be written.
*
* @exception IOException on problems writing to the specified
* stream.
*/

protected void doWriteData(final OutputStream stream, final byte [] data)
throws IOException
{
stream.write(data);
}

/**
* Write the block's data to an OutputStream
*
* @param stream the OutputStream to which the stored data should
* be written
*
* @exception IOException on problems writing to the specified
* stream
*/

abstract void writeData(final OutputStream stream)
throws IOException;

/* ********** START implementation of BlockWritable ********** */

/**
* Write the storage to an OutputStream
*
* @param stream the OutputStream to which the stored data should
* be written
*
* @exception IOException on problems writing to the specified
* stream
*/

public void writeBlocks(final OutputStream stream)
throws IOException
{
writeData(stream);
}

/* ********** END implementation of BlockWritable ********** */
} // end abstract class BigBlock


+ 0
- 320
src/java/org/apache/poi/poifs/storage/BlockAllocationTableReader.java View File

@@ -1,320 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.IOException;

import java.util.*;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.util.*;

/**
* This class manages and creates the Block Allocation Table, which is
* basically a set of linked lists of block indices.
* <P>
* Each block of the filesystem has an index. The first block, the
* header, is skipped; the first block after the header is index 0,
* the next is index 1, and so on.
* <P>
* A block's index is also its index into the Block Allocation
* Table. The entry that it finds in the Block Allocation Table is the
* index of the next block in the linked list of blocks making up a
* file, or it is set to -2: end of list.
*
* @author Marc Johnson (mjohnson at apache dot org)
*/
public final class BlockAllocationTableReader {
private static final POILogger _logger = POILogFactory.getLogger(BlockAllocationTableReader.class);

/**
* Maximum size (in blocks) of the allocation table as supported by
* POI.<br>
*
* This constant has been chosen to help POI identify corrupted data in the
* header block (rather than crash immediately with {@link OutOfMemoryError}
* ). It's not clear if the compound document format actually specifies any
* upper limits. For files with 512 byte blocks, having an allocation table
* of 65,535 blocks would correspond to a total file size of 4GB. Needless
* to say, POI probably cannot handle files anywhere near that size.
*/
private static final int MAX_BLOCK_COUNT = 65535;
private final IntList _entries;
private POIFSBigBlockSize bigBlockSize;

/**
* create a BlockAllocationTableReader for an existing filesystem. Side
* effect: when this method finishes, the BAT blocks will have
* been removed from the raw block list, and any blocks labeled as
* 'unused' in the block allocation table will also have been
* removed from the raw block list.
*
* @param block_count the number of BAT blocks making up the block
* allocation table
* @param block_array the array of BAT block indices from the
* filesystem's header
* @param xbat_count the number of XBAT blocks
* @param xbat_index the index of the first XBAT block
* @param raw_block_list the list of RawDataBlocks
*
* @exception IOException if, in trying to create the table, we
* encounter logic errors
*/
public BlockAllocationTableReader(POIFSBigBlockSize bigBlockSize, int block_count, int [] block_array,
int xbat_count, int xbat_index, BlockList raw_block_list) throws IOException {
this(bigBlockSize);
sanityCheckBlockCount(block_count);

// We want to get the whole of the FAT table
// To do this:
// * Work through raw_block_list, which points to the
// first (up to) 109 BAT blocks
// * Jump to the XBAT offset, and read in XBATs which
// point to more BAT blocks
int limit = Math.min(block_count, block_array.length);
int block_index;
// This will hold all of the BAT blocks in order
RawDataBlock blocks[] = new RawDataBlock[ block_count ];

// Process the first (up to) 109 BAT blocks
for (block_index = 0; block_index < limit; block_index++)
{
// Check that the sector number of the BAT block is a valid one
int nextOffset = block_array[ block_index ];
if(nextOffset > raw_block_list.blockCount()) {
throw new IOException("Your file contains " + raw_block_list.blockCount() +
" sectors, but the initial DIFAT array at index " + block_index +
" referenced block # " + nextOffset + ". This isn't allowed and " +
" your file is corrupt");
}
// Record the sector number of this BAT block
blocks[ block_index ] =
( RawDataBlock ) raw_block_list.remove(nextOffset);
}
// Process additional BAT blocks via the XBATs
if (block_index < block_count)
{

// must have extended blocks
if (xbat_index < 0)
{
throw new IOException(
"BAT count exceeds limit, yet XBAT index indicates no valid entries");
}
int chain_index = xbat_index;
int max_entries_per_block = bigBlockSize.getXBATEntriesPerBlock();
int chain_index_offset = bigBlockSize.getNextXBATChainOffset();

// Each XBAT block contains either:
// (maximum number of sector indexes) + index of next XBAT
// some sector indexes + FREE sectors to max # + EndOfChain
for (int j = 0; j < xbat_count; j++)
{
limit = Math.min(block_count - block_index,
max_entries_per_block);
byte[] data = raw_block_list.remove(chain_index).getData();
int offset = 0;

for (int k = 0; k < limit; k++)
{
blocks[ block_index++ ] =
( RawDataBlock ) raw_block_list
.remove(LittleEndian.getInt(data, offset));
offset += LittleEndianConsts.INT_SIZE;
}
chain_index = LittleEndian.getInt(data, chain_index_offset);
if (chain_index == POIFSConstants.END_OF_CHAIN)
{
break;
}
}
}
if (block_index != block_count)
{
throw new IOException("Could not find all blocks");
}

// Now that we have all of the raw data blocks which make
// up the FAT, go through and create the indices
setEntries(blocks, raw_block_list);
}

/**
* create a BlockAllocationTableReader from an array of raw data blocks
*
* @param blocks the raw data
* @param raw_block_list the list holding the managed blocks
*
* @exception IOException
*/
BlockAllocationTableReader(POIFSBigBlockSize bigBlockSize, ListManagedBlock[] blocks, BlockList raw_block_list)
throws IOException {
this(bigBlockSize);
setEntries(blocks, raw_block_list);
}

BlockAllocationTableReader(POIFSBigBlockSize bigBlockSize) {
this.bigBlockSize = bigBlockSize;
_entries = new IntList();
}
public static void sanityCheckBlockCount(int block_count) throws IOException {
if (block_count <= 0) {
throw new IOException(
"Illegal block count; minimum count is 1, got " +
block_count + " instead"
);
}
if (block_count > MAX_BLOCK_COUNT) {
throw new IOException(
"Block count " + block_count +
" is too high. POI maximum is " + MAX_BLOCK_COUNT + "."
);
}
}

/**
* walk the entries from a specified point and return the
* associated blocks. The associated blocks are removed from the
* block list
*
* @param startBlock the first block in the chain
* @param blockList the raw data block list
*
* @return array of ListManagedBlocks, in their correct order
*
* @exception IOException if there is a problem acquiring the blocks
*/
ListManagedBlock[] fetchBlocks(int startBlock, int headerPropertiesStartBlock,
BlockList blockList) throws IOException {
List<ListManagedBlock> blocks = new ArrayList<>();
int currentBlock = startBlock;
boolean firstPass = true;
ListManagedBlock dataBlock = null;

// Process the chain from the start to the end
// Normally we have header, data, end
// Sometimes we have data, header, end
// For those cases, stop at the header, not the end
while (currentBlock != POIFSConstants.END_OF_CHAIN) {
try {
// Grab the data at the current block offset
dataBlock = blockList.remove(currentBlock);
blocks.add(dataBlock);
// Now figure out which block we go to next
currentBlock = _entries.get(currentBlock);
firstPass = false;
} catch(IOException e) {
if(currentBlock == headerPropertiesStartBlock) {
// Special case where things are in the wrong order
_logger.log(POILogger.WARN, "Warning, header block comes after data blocks in POIFS block listing");
currentBlock = POIFSConstants.END_OF_CHAIN;
} else if(currentBlock == 0 && firstPass) {
// Special case where the termination isn't done right
// on an empty set
_logger.log(POILogger.WARN, "Warning, incorrectly terminated empty data blocks in POIFS block listing (should end at -2, ended at 0)");
currentBlock = POIFSConstants.END_OF_CHAIN;
} else {
// Ripple up
throw e;
}
}
}

return blocks.toArray(new ListManagedBlock[blocks.size()]);
}

// methods for debugging reader

/**
* determine whether the block specified by index is used or not
*
* @param index index of block in question
*
* @return true if the specific block is used, else false
*/
boolean isUsed(int index) {

try {
return _entries.get(index) != -1;
} catch (IndexOutOfBoundsException e) {
// ignored
return false;
}
}

/**
* return the next block index
*
* @param index of the current block
*
* @return index of the next block (may be
* POIFSConstants.END_OF_CHAIN, indicating end of chain
* (duh))
*
* @exception IOException if the current block is unused
*/
int getNextBlockIndex(int index) throws IOException {
if (isUsed(index)) {
return _entries.get(index);
}
throw new IOException("index " + index + " is unused");
}

/**
* Convert an array of blocks into a set of integer indices
*
* @param blocks the array of blocks containing the indices
* @param raw_blocks the list of blocks being managed. Unused
* blocks will be eliminated from the list
*/
private void setEntries(ListManagedBlock[] blocks, BlockList raw_blocks) throws IOException {
int limit = bigBlockSize.getBATEntriesPerBlock();

for (int block_index = 0; block_index < blocks.length; block_index++)
{
byte[] data = blocks[ block_index ].getData();
int offset = 0;

for (int k = 0; k < limit; k++)
{
int entry = LittleEndian.getInt(data, offset);

if (entry == POIFSConstants.UNUSED_BLOCK)
{
raw_blocks.zap(_entries.size());
}
_entries.add(entry);
offset += LittleEndianConsts.INT_SIZE;
}

// discard block
blocks[ block_index ] = null;
}
raw_blocks.setBAT(this);
}

@Internal
public IntList getEntries() {
return _entries;
}
}
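The class comment of the removed reader describes the BAT as a set of linked lists: the entry stored at a block's own index is the index of the next block in the same stream, and -2 (END_OF_CHAIN) terminates the chain. A minimal, self-contained sketch of that walk over a plain int[]; it only illustrates the data structure and is not the replacement code:

import java.util.ArrayList;
import java.util.List;

public class ChainWalkDemo {
    private static final int END_OF_CHAIN = -2;

    // Follow the chain of the stream that starts at startBlock.
    static List<Integer> chain(int[] bat, int startBlock) {
        List<Integer> blocks = new ArrayList<>();
        int current = startBlock;
        while (current != END_OF_CHAIN) {
            blocks.add(current);
            current = bat[current];    // next block index, or END_OF_CHAIN
        }
        return blocks;
    }

    public static void main(String[] args) {
        // Block 0 -> 3 -> 1 -> end; block 2 belongs to some other chain.
        int[] bat = { 3, END_OF_CHAIN, END_OF_CHAIN, 1 };
        System.out.println(chain(bat, 0));          // [0, 3, 1]
    }
}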

+ 0
- 186
src/java/org/apache/poi/poifs/storage/BlockAllocationTableWriter.java View File

@@ -1,186 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.poifs.filesystem.BATManaged;
import org.apache.poi.util.IntList;

/**
* This class manages and creates the Block Allocation Table, which is
* basically a set of linked lists of block indices.
* <P>
* Each block of the filesystem has an index. The first block, the
* header, is skipped; the first block after the header is index 0,
* the next is index 1, and so on.
* <P>
* A block's index is also its index into the Block Allocation
* Table. The entry that it finds in the Block Allocation Table is the
* index of the next block in the linked list of blocks making up a
* file, or it is set to -2: end of list.
*
* @author Marc Johnson (mjohnson at apache dot org)
*/
public final class BlockAllocationTableWriter implements BlockWritable, BATManaged {
private IntList _entries;
private BATBlock[] _blocks;
private int _start_block;
private POIFSBigBlockSize _bigBlockSize;

/**
* create a BlockAllocationTableWriter
*/
public BlockAllocationTableWriter(POIFSBigBlockSize bigBlockSize)
{
_bigBlockSize = bigBlockSize;
_start_block = POIFSConstants.END_OF_CHAIN;
_entries = new IntList();
_blocks = new BATBlock[ 0 ];
}

/**
* Create the BATBlocks we need
*
* @return start block index of BAT blocks
*/
public int createBlocks()
{
int xbat_blocks = 0;
int bat_blocks = 0;

while (true)
{
int calculated_bat_blocks =
BATBlock.calculateStorageRequirements(_bigBlockSize,
bat_blocks
+ xbat_blocks
+ _entries.size());
int calculated_xbat_blocks =
HeaderBlockWriter.calculateXBATStorageRequirements(
_bigBlockSize, calculated_bat_blocks);

if ((bat_blocks == calculated_bat_blocks)
&& (xbat_blocks == calculated_xbat_blocks))
{

// stable ... we're OK
break;
}
bat_blocks = calculated_bat_blocks;
xbat_blocks = calculated_xbat_blocks;
}
int startBlock = allocateSpace(bat_blocks);

allocateSpace(xbat_blocks);
simpleCreateBlocks();
return startBlock;
}

/**
* Allocate space for a block of indices
*
* @param blockCount the number of blocks to allocate space for
*
* @return the starting index of the blocks
*/
public int allocateSpace(final int blockCount)
{
int startBlock = _entries.size();

if (blockCount > 0)
{
int limit = blockCount - 1;
int index = startBlock + 1;

for (int k = 0; k < limit; k++)
{
_entries.add(index++);
}
_entries.add(POIFSConstants.END_OF_CHAIN);
}
return startBlock;
}

/**
* get the starting block
*
* @return the starting block index
*/
public int getStartBlock()
{
return _start_block;
}

/**
* create the BATBlocks
*/
void simpleCreateBlocks()
{
_blocks = BATBlock.createBATBlocks(_bigBlockSize, _entries.toArray());
}

/**
* Write the storage to an OutputStream
*
* @param stream the OutputStream to which the stored data should
* be written
*
* @exception IOException on problems writing to the specified
* stream
*/
public void writeBlocks(final OutputStream stream)
throws IOException
{
for (int j = 0; j < _blocks.length; j++)
{
_blocks[ j ].writeBlocks(stream);
}
}
/**
* Write the BAT into its associated block
*/
public static void writeBlock(final BATBlock bat, final ByteBuffer block)
throws IOException
{
bat.writeData(block);
}

/**
* Return the number of BigBlocks this instance uses
*
* @return count of BigBlock instances
*/
public int countBlocks()
{
return _blocks.length;
}

/**
* Set the start block for this instance
*/
public void setStartBlock(int start_block)
{
_start_block = start_block;
}
}

+ 0
- 83
src/java/org/apache/poi/poifs/storage/BlockList.java View File

@@ -1,83 +0,0 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.IOException;

/**
* Interface for lists of blocks that are mapped by block allocation
* tables
*
* @author Marc Johnson (mjohnson at apache dot org)
*/

public interface BlockList
{

/**
* remove the specified block from the list
*
* @param index the index of the specified block; if the index is
* out of range, that's ok
*/

void zap(final int index);

/**
* remove and return the specified block from the list
*
* @param index the index of the specified block
*
* @return the specified block
*
* @exception IOException if the index is out of range or has
* already been removed
*/

ListManagedBlock remove(final int index) throws IOException;

/**
* get the blocks making up a particular stream in the list. The
* blocks are removed from the list.
*
* @param startBlock the index of the first block in the stream
* @param headerPropertiesStartBlock the index of the first header block in the stream
*
* @return the stream as an array of correctly ordered blocks
*
* @exception IOException if blocks are missing
*/

ListManagedBlock [] fetchBlocks(final int startBlock, final int headerPropertiesStartBlock)
throws IOException;

/**
* set the associated BlockAllocationTable
*
* @param bat the associated BlockAllocationTable
*
* @exception IOException
*/

void setBAT(final BlockAllocationTableReader bat) throws IOException;
int blockCount();
} // end public interface BlockList


+ 0
- 161
src/java/org/apache/poi/poifs/storage/BlockListImpl.java View File

@@ -1,161 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.IOException;

import org.apache.poi.util.Internal;

/**
* A simple implementation of BlockList
*
* @author Marc Johnson (mjohnson at apache dot org)
*/
abstract class BlockListImpl implements BlockList {
private ListManagedBlock[] _blocks;
private BlockAllocationTableReader _bat;

protected BlockListImpl()
{
_blocks = new ListManagedBlock[ 0 ];
_bat = null;
}

/**
* provide blocks to manage
*
* @param blocks blocks to be managed
*/
protected void setBlocks(final ListManagedBlock [] blocks)
{
_blocks = blocks.clone();
}

/**
* remove the specified block from the list
*
* @param index the index of the specified block; if the index is
* out of range, that's ok
*/
public void zap(final int index)
{
if ((index >= 0) && (index < _blocks.length))
{
_blocks[ index ] = null;
}
}

/**
* Internal method. Gets, without sanity checks or
* removing.
*/
@Internal
public ListManagedBlock get(final int index) {
return _blocks[index];
}

/**
* remove and return the specified block from the list
*
* @param index the index of the specified block
*
* @return the specified block
*
* @exception IOException if the index is out of range or has
* already been removed
*/
public ListManagedBlock remove(final int index)
throws IOException
{
ListManagedBlock result = null;

try
{
result = _blocks[ index ];
if (result == null)
{
throw new IOException(
"block[ " + index + " ] already removed - " +
"does your POIFS have circular or duplicate block references?"
);
}
_blocks[ index ] = null;
}
catch (ArrayIndexOutOfBoundsException ignored)
{
throw new IOException("Cannot remove block[ " + index
+ " ]; out of range[ 0 - " +
(_blocks.length-1) + " ]");
}
return result;
}

/**
* get the blocks making up a particular stream in the list. The
* blocks are removed from the list.
*
* @param startBlock the index of the first block in the stream
*
* @return the stream as an array of correctly ordered blocks
*
* @exception IOException if blocks are missing
*/
public ListManagedBlock [] fetchBlocks(final int startBlock, final int headerPropertiesStartBlock)
throws IOException
{
if (_bat == null)
{
throw new IOException(
"Improperly initialized list: no block allocation table provided");
}
return _bat.fetchBlocks(startBlock, headerPropertiesStartBlock, this);
}

/**
* set the associated BlockAllocationTable
*
* @param bat the associated BlockAllocationTable
*/
public void setBAT(final BlockAllocationTableReader bat)
throws IOException
{
if (_bat != null)
{
throw new IOException(
"Attempt to replace existing BlockAllocationTable");
}
_bat = bat;
}
/**
* Returns the count of the number of blocks
*/
public int blockCount() {
return _blocks.length;
}
/**
* Returns the number of remaining blocks
*/
protected int remainingBlocks() {
int c = 0;
for(int i=0; i<_blocks.length; i++) {
if(_blocks[i] != null) c++;
}
return c;
}
}

+ 0
- 186
src/java/org/apache/poi/poifs/storage/DataInputBlock.java View File

@@ -1,186 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

/**
* Wraps a <tt>byte</tt> array and provides simple data input access.
* Internally, this class maintains a buffer read index, so that for the most part, primitive
* data can be read in a data-input-stream-like manner.<p>
*
* Note - the calling class should call the {@link #available()} method to detect end-of-buffer
* and move to the next data block when the current is exhausted.
* For optimisation reasons, no error handling is performed in this class. Thus, mistakes in
* calling code may raise ugly exceptions here, like {@link ArrayIndexOutOfBoundsException},
* etc.<p>
*
* The multi-byte primitive input methods ({@link #readUShortLE()}, {@link #readIntLE()} and
* {@link #readLongLE()}) have corresponding 'spanning read' methods which (when required) perform
* a read across the block boundary. These spanning read methods take the previous
* {@link DataInputBlock} as a parameter.
* Reads of larger amounts of data (into <tt>byte</tt> array buffers) must be managed by the caller
* since these could conceivably involve more than two blocks.
*
* @author Josh Micich
*/
public final class DataInputBlock {

/**
* Possibly any size (usually 512 or 64 bytes). Assumed to be at least 8 bytes for all blocks
* before the end of the stream. The last block in the stream can be any size except zero.
*/
private final byte[] _buf;
private int _readIndex;
private int _maxIndex;

DataInputBlock(byte[] data, int startOffset) { // NOSONAR
_buf = data;
_readIndex = startOffset;
_maxIndex = _buf.length;
}
public int available() {
return _maxIndex-_readIndex;
}

public int readUByte() {
return _buf[_readIndex++] & 0xFF;
}

/**
* Reads a <tt>short</tt> which was encoded in <em>little endian</em> format.
*/
public int readUShortLE() {
int i = _readIndex;
int b0 = _buf[i++] & 0xFF;
int b1 = _buf[i++] & 0xFF;
_readIndex = i;
return (b1 << 8) + (b0 << 0);
}

/**
* Reads a <tt>short</tt> which spans the end of <tt>prevBlock</tt> and the start of this block.
*/
public int readUShortLE(DataInputBlock prevBlock) {
// simple case - will always be one byte in each block
int i = prevBlock._buf.length-1;
int b0 = prevBlock._buf[i] & 0xFF;
int b1 = _buf[_readIndex++] & 0xFF;
return (b1 << 8) + (b0 << 0);
}

/**
* Reads an <tt>int</tt> which was encoded in <em>little endian</em> format.
*/
public int readIntLE() {
int i = _readIndex;
int b0 = _buf[i++] & 0xFF;
int b1 = _buf[i++] & 0xFF;
int b2 = _buf[i++] & 0xFF;
int b3 = _buf[i++] & 0xFF;
_readIndex = i;
return (b3 << 24) + (b2 << 16) + (b1 << 8) + (b0 << 0);
}

/**
* Reads an <tt>int</tt> which spans the end of <tt>prevBlock</tt> and the start of this block.
*/
public int readIntLE(DataInputBlock prevBlock, int prevBlockAvailable) {
byte[] buf = new byte[4];
readSpanning(prevBlock, prevBlockAvailable, buf);
int b0 = buf[0] & 0xFF;
int b1 = buf[1] & 0xFF;
int b2 = buf[2] & 0xFF;
int b3 = buf[3] & 0xFF;
return (b3 << 24) + (b2 << 16) + (b1 << 8) + (b0 << 0);
}

/**
* Reads a <tt>long</tt> which was encoded in <em>little endian</em> format.
*/
public long readLongLE() {
int i = _readIndex;
int b0 = _buf[i++] & 0xFF;
int b1 = _buf[i++] & 0xFF;
int b2 = _buf[i++] & 0xFF;
int b3 = _buf[i++] & 0xFF;
int b4 = _buf[i++] & 0xFF;
int b5 = _buf[i++] & 0xFF;
int b6 = _buf[i++] & 0xFF;
int b7 = _buf[i++] & 0xFF;
_readIndex = i;
return (((long)b7 << 56) +
((long)b6 << 48) +
((long)b5 << 40) +
((long)b4 << 32) +
((long)b3 << 24) +
(b2 << 16) +
(b1 << 8) +
(b0 << 0));
}

/**
* Reads a <tt>long</tt> which spans the end of <tt>prevBlock</tt> and the start of this block.
*/
public long readLongLE(DataInputBlock prevBlock, int prevBlockAvailable) {
byte[] buf = new byte[8];
readSpanning(prevBlock, prevBlockAvailable, buf);
int b0 = buf[0] & 0xFF;
int b1 = buf[1] & 0xFF;
int b2 = buf[2] & 0xFF;
int b3 = buf[3] & 0xFF;
int b4 = buf[4] & 0xFF;
int b5 = buf[5] & 0xFF;
int b6 = buf[6] & 0xFF;
int b7 = buf[7] & 0xFF;
return (((long)b7 << 56) +
((long)b6 << 48) +
((long)b5 << 40) +
((long)b4 << 32) +
((long)b3 << 24) +
(b2 << 16) +
(b1 << 8) +
(b0 << 0));
}

/**
* Reads a small amount of data from across the boundary between two blocks.
* The {@link #_readIndex} of this (the second) block is updated accordingly.
* Note- this method (and other code) assumes that the second {@link DataInputBlock}
* always is big enough to complete the read without being exhausted.
*/
private void readSpanning(DataInputBlock prevBlock, int prevBlockAvailable, byte[] buf) {
System.arraycopy(prevBlock._buf, prevBlock._readIndex, buf, 0, prevBlockAvailable);
int secondReadLen = buf.length-prevBlockAvailable;
System.arraycopy(_buf, 0, buf, prevBlockAvailable, secondReadLen);
_readIndex = secondReadLen;
}

/**
* Reads <tt>len</tt> bytes from this block into the supplied buffer.
*/
public void readFully(byte[] buf, int off, int len) {
System.arraycopy(_buf, _readIndex, buf, off, len);
_readIndex += len;
}
}
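The caller contract described in the class comment (check available() and switch to the spanning overloads when a primitive straddles a block boundary) amounts to stitching the tail of one buffer onto the head of the next before decoding little-endian. A standalone illustration of that stitch, independent of the removed class:

public class SpanningReadDemo {
    // Reads a little-endian int whose first availInPrev bytes sit at the end
    // of prev and whose remaining bytes start at next[0].
    static int readIntLESpanning(byte[] prev, int availInPrev, byte[] next) {
        byte[] buf = new byte[4];
        System.arraycopy(prev, prev.length - availInPrev, buf, 0, availInPrev);
        System.arraycopy(next, 0, buf, availInPrev, 4 - availInPrev);
        return (buf[0] & 0xFF)
                | ((buf[1] & 0xFF) << 8)
                | ((buf[2] & 0xFF) << 16)
                | ((buf[3] & 0xFF) << 24);
    }

    public static void main(String[] args) {
        byte[] prev = { 0x00, 0x78 };               // last byte of the previous block
        byte[] next = { 0x56, 0x34, 0x12 };         // start of the current block
        // bytes 78 56 34 12 little-endian == 0x12345678
        System.out.println(Integer.toHexString(readIntLESpanning(prev, 1, next)));
    }
}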

+ 0
- 204
src/java/org/apache/poi/poifs/storage/DocumentBlock.java View File

@@ -1,204 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.util.IOUtils;

/**
* A block of document data.
*
* @author Marc Johnson (mjohnson at apache dot org)
*/
public final class DocumentBlock extends BigBlock {

//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;

private static final byte _default_value = ( byte ) 0xFF;
private byte[] _data;
private int _bytes_read;

/**
* create a document block from a raw data block
*
* @param block the raw data block
*
* @exception IOException
*/

public DocumentBlock(final RawDataBlock block)
throws IOException
{
super(
block.getBigBlockSize() == POIFSConstants.SMALLER_BIG_BLOCK_SIZE ?
POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS :
POIFSConstants.LARGER_BIG_BLOCK_SIZE_DETAILS
);
_data = block.getData();
_bytes_read = _data.length;
}

/**
* Create a single instance initialized with data.
*
* @param stream the InputStream delivering the data.
*
* @exception IOException
*/

public DocumentBlock(final InputStream stream, POIFSBigBlockSize bigBlockSize)
throws IOException
{
this(bigBlockSize);
int count = IOUtils.readFully(stream, _data);

_bytes_read = (count == -1) ? 0
: count;
}

/**
* Create a single instance initialized with default values
*/

private DocumentBlock(POIFSBigBlockSize bigBlockSize)
{
super(bigBlockSize);
_data = IOUtils.safelyAllocate(bigBlockSize.getBigBlockSize(), MAX_RECORD_LENGTH);
Arrays.fill(_data, _default_value);
}

/**
* Get the number of bytes read for this block
*
* @return bytes read into the block
*/

public int size()
{
return _bytes_read;
}

/**
* Was this a partially read block?
*
* @return true if the block was only partially filled with data
*/

public boolean partiallyRead()
{
return _bytes_read != bigBlockSize.getBigBlockSize();
}

/**
* @return the fill byte used
*/

public static byte getFillByte()
{
return _default_value;
}

/**
* convert a single long array into an array of DocumentBlock
* instances
*
* @param array the byte array to be converted
* @param size the intended size of the array (which may be smaller)
*
* @return an array of DocumentBlock instances, filled from the
* input array
*/

public static DocumentBlock [] convert(final POIFSBigBlockSize bigBlockSize,
final byte [] array,
final int size)
{
DocumentBlock[] rval =
new DocumentBlock[ (size + bigBlockSize.getBigBlockSize() - 1) / bigBlockSize.getBigBlockSize() ];
int offset = 0;

for (int k = 0; k < rval.length; k++)
{
rval[ k ] = new DocumentBlock(bigBlockSize);
if (offset < array.length)
{
int length = Math.min(bigBlockSize.getBigBlockSize(),
array.length - offset);

System.arraycopy(array, offset, rval[ k ]._data, 0, length);
if (length != bigBlockSize.getBigBlockSize())
{
Arrays.fill(rval[ k ]._data, length,
bigBlockSize.getBigBlockSize(),
_default_value);
}
}
else
{
Arrays.fill(rval[ k ]._data, _default_value);
}
offset += bigBlockSize.getBigBlockSize();
}
return rval;
}

public static DataInputBlock getDataInputBlock(DocumentBlock[] blocks, int offset) {
if(blocks == null || blocks.length == 0) {
return null;
}
// Key things about the size of the block
POIFSBigBlockSize bigBlockSize = blocks[0].bigBlockSize;
int BLOCK_SHIFT = bigBlockSize.getHeaderValue();
int BLOCK_SIZE = bigBlockSize.getBigBlockSize();
int BLOCK_MASK = BLOCK_SIZE - 1;

// Now do the offset lookup
int firstBlockIndex = offset >> BLOCK_SHIFT;
int firstBlockOffset= offset & BLOCK_MASK;
return new DataInputBlock(blocks[firstBlockIndex]._data, firstBlockOffset);
}

/* ********** START extension of BigBlock ********** */

/**
* Write the block's data to an OutputStream
*
* @param stream the OutputStream to which the stored data should
* be written
*
* @exception IOException on problems writing to the specified
* stream
*/

void writeData(final OutputStream stream)
throws IOException
{
doWriteData(stream, _data);
}

/* ********** END extension of BigBlock ********** */
} // end public class DocumentBlock
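convert() above slices a document's bytes into fixed-size big blocks and pads the last one with the 0xFF fill byte. The same slicing shown standalone with hypothetical sizes; a sketch, not the removed class:

import java.util.Arrays;

public class BlockSplitDemo {
    static byte[][] split(byte[] data, int blockSize) {
        int count = (data.length + blockSize - 1) / blockSize;   // ceiling division
        byte[][] blocks = new byte[count][blockSize];
        for (int k = 0; k < count; k++) {
            Arrays.fill(blocks[k], (byte) 0xFF);                 // fill byte, as in DocumentBlock
            int offset = k * blockSize;
            int len = Math.min(blockSize, data.length - offset);
            System.arraycopy(data, offset, blocks[k], 0, len);
        }
        return blocks;
    }

    public static void main(String[] args) {
        byte[][] blocks = split(new byte[1000], 512);
        System.out.println(blocks.length);                       // 2: 512 + 488 data bytes, rest padded
    }
}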


+ 4
- 5
src/java/org/apache/poi/poifs/storage/HeaderBlock.java View File

@@ -214,12 +214,12 @@ public final class HeaderBlock implements HeaderBlockConstants {
byte[] data = new byte[512];
int bsCount = IOUtils.readFully(stream, data);
if(bsCount != 512) {
throw alertShortRead(bsCount, 512);
throw alertShortRead(bsCount);
}
return data;
}

private static IOException alertShortRead(int pRead, int expectedReadSize) {
private static IOException alertShortRead(int pRead) {
int read;
if (pRead < 0) {
//Can't have -1 bytes read in the error message!
@@ -230,8 +230,7 @@ public final class HeaderBlock implements HeaderBlockConstants {
String type = " byte" + (read == 1 ? (""): ("s"));

return new IOException("Unable to read entire header; "
+ read + type + " read; expected "
+ expectedReadSize + " bytes");
+ read + type + " read; expected 512 bytes");
}

/**
@@ -372,7 +371,7 @@ public final class HeaderBlock implements HeaderBlockConstants {
* @exception IOException on problems writing to the specified
* stream
*/
void writeData(final OutputStream stream) throws IOException {
public void writeData(final OutputStream stream) throws IOException {
// Update the counts and start positions
new IntegerField(_bat_count_offset, _bat_count, _data);
new IntegerField(_property_start_offset, _property_start, _data);

+ 0
- 195
src/java/org/apache/poi/poifs/storage/HeaderBlockWriter.java View File

@@ -1,195 +0,0 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.common.POIFSConstants;

/**
* The block containing the archive header
*
* @author Marc Johnson (mjohnson at apache dot org)
*/
public class HeaderBlockWriter implements HeaderBlockConstants, BlockWritable
{
private final HeaderBlock _header_block;

/**
* Create a single instance initialized with default values
*/
public HeaderBlockWriter(POIFSBigBlockSize bigBlockSize)
{
_header_block = new HeaderBlock(bigBlockSize);
}

/**
* Create a single instance initialized with the specified
* existing values
*/
public HeaderBlockWriter(HeaderBlock headerBlock)
{
_header_block = headerBlock;
}

/**
* Set BAT block parameters. Assumes that all BAT blocks are
* contiguous. Will construct XBAT blocks if necessary and return
* the array of newly constructed XBAT blocks.
*
* @param blockCount count of BAT blocks
* @param startBlock index of first BAT block
*
* @return array of XBAT blocks; may be zero length, will not be
* null
*/

public BATBlock [] setBATBlocks(final int blockCount,
final int startBlock)
{
BATBlock[] rvalue;
POIFSBigBlockSize bigBlockSize = _header_block.getBigBlockSize();

_header_block.setBATCount(blockCount);

// Set the BAT locations
int limit = Math.min(blockCount, _max_bats_in_header);
int[] bat_blocks = new int[limit];
for (int j = 0; j < limit; j++) {
bat_blocks[j] = startBlock + j;
}
_header_block.setBATArray(bat_blocks);
// Now do the XBATs
if (blockCount > _max_bats_in_header)
{
int excess_blocks = blockCount - _max_bats_in_header;
int[] excess_block_array = new int[ excess_blocks ];

for (int j = 0; j < excess_blocks; j++)
{
excess_block_array[ j ] = startBlock + j
+ _max_bats_in_header;
}
rvalue = BATBlock.createXBATBlocks(bigBlockSize, excess_block_array,
startBlock + blockCount);
_header_block.setXBATStart(startBlock + blockCount);
}
else
{
rvalue = BATBlock.createXBATBlocks(bigBlockSize, new int[ 0 ], 0);
_header_block.setXBATStart(POIFSConstants.END_OF_CHAIN);
}
_header_block.setXBATCount(rvalue.length);
return rvalue;
}

/**
* Set start of Property Table
*
* @param startBlock the index of the first block of the Property
* Table
*/
public void setPropertyStart(final int startBlock)
{
_header_block.setPropertyStart(startBlock);
}

/**
* Set start of small block allocation table
*
* @param startBlock the index of the first big block of the small
* block allocation table
*/
public void setSBATStart(final int startBlock)
{
_header_block.setSBATStart(startBlock);
}

/**
* Set count of SBAT blocks
*
* @param count the number of SBAT blocks
*/
public void setSBATBlockCount(final int count)
{
_header_block.setSBATBlockCount(count);
}

/**
* For a given number of BAT blocks, calculate how many XBAT
* blocks will be needed
*
* @param blockCount number of BAT blocks
*
* @return number of XBAT blocks needed
*/

static int calculateXBATStorageRequirements(POIFSBigBlockSize bigBlockSize, final int blockCount)
{
return (blockCount > _max_bats_in_header)
? BATBlock.calculateXBATStorageRequirements(
bigBlockSize, blockCount - _max_bats_in_header)
: 0;
}

/* ********** START extension of BigBlock ********** */

/**
* Write the block's data to an OutputStream
*
* @param stream the OutputStream to which the stored data should
* be written
*
* @exception IOException on problems writing to the specified
* stream
*/
public void writeBlocks(final OutputStream stream)
throws IOException
{
_header_block.writeData(stream);
}
/**
* Write the block's data to an existing block
*
* @param block the ByteBuffer of the block to which the
* stored data should be written
*
* @exception IOException on problems writing to the block
*/
public void writeBlock(ByteBuffer block)
throws IOException
{
ByteArrayOutputStream baos = new ByteArrayOutputStream(
_header_block.getBigBlockSize().getBigBlockSize()
);
_header_block.writeData(baos);
block.put(baos.toByteArray());
}

/* ********** END extension of BigBlock ********** */
} // end public class HeaderBlockWriter
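The setBATBlocks()/calculateXBATStorageRequirements() pair removed here follows the usual CFBF layout: the header can point at a limited number of BAT blocks directly, and any excess pointers spill into XBAT blocks. A hedged sketch of that sizing step; the 109-per-header and 127-per-XBAT figures are assumptions taken from the file format, not read from this class:

    public class XbatSketch {
        private static final int MAX_BATS_IN_HEADER = 109;   // assumed header DIFAT capacity
        private static final int BATS_PER_XBAT = 127;        // assumed: 512/4 entries minus one chain pointer

        // Same shape as calculateXBATStorageRequirements(): ceiling division over the excess
        static int xbatBlocksNeeded(int batBlockCount) {
            if (batBlockCount <= MAX_BATS_IN_HEADER) {
                return 0;
            }
            int excess = batBlockCount - MAX_BATS_IN_HEADER;
            return (excess + BATS_PER_XBAT - 1) / BATS_PER_XBAT;
        }

        public static void main(String[] args) {
            System.out.println(xbatBlocksNeeded(100));   // 0 - fits in the header
            System.out.println(xbatBlocksNeeded(110));   // 1 - one BAT pointer spills over
            System.out.println(xbatBlocksNeeded(300));   // 2 - 191 excess pointers / 127 per XBAT
        }
    }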


+ 0
- 45
src/java/org/apache/poi/poifs/storage/ListManagedBlock.java View File

@@ -1,45 +0,0 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.IOException;

/**
* An interface for blocks managed by a list that works with a
* BlockAllocationTable to keep block sequences straight
*
* @author Marc Johnson (mjohnson at apache dot org)
*/

public interface ListManagedBlock
{

/**
* Get the data from the block
*
* @return the block's data as a byte array
*
* @exception IOException if there is no data
*/

public byte [] getData()
throws IOException;
} // end public interface ListManagedBlock


+ 0
- 127
src/java/org/apache/poi/poifs/storage/PropertyBlock.java View File

@@ -1,127 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.IOException;
import java.io.OutputStream;
import java.util.List;

import org.apache.poi.poifs.common.POIFSBigBlockSize;
import org.apache.poi.poifs.property.Property;

/**
* A block of Property instances
*
* @author Marc Johnson (mjohnson at apache dot org)
*/
public final class PropertyBlock extends BigBlock {
private Property[] _properties;

/**
* Create a single instance initialized with default values
*
* @param properties the properties to be inserted
* @param offset the offset into the properties array
*/

private PropertyBlock(final POIFSBigBlockSize bigBlockSize, final Property [] properties, final int offset)
{
super(bigBlockSize);
_properties = new Property[ bigBlockSize.getPropertiesPerBlock() ];
for (int j = 0; j < _properties.length; j++)
{
_properties[ j ] = properties[ j + offset ];
}
}

/**
* Create an array of PropertyBlocks from an array of Property
* instances, creating empty Property instances to make up any
* shortfall
*
* @param properties the Property instances to be converted into
* PropertyBlocks, in a java List
*
* @return the array of newly created PropertyBlock instances
*/

public static BlockWritable [] createPropertyBlockArray(
final POIFSBigBlockSize bigBlockSize, final List<Property> properties)
{
int _properties_per_block = bigBlockSize.getPropertiesPerBlock();
int block_count =
(properties.size() + _properties_per_block - 1)
/ _properties_per_block;
Property[] to_be_written =
new Property[ block_count * _properties_per_block ];

System.arraycopy(properties.toArray(new Property[ 0 ]), 0,
to_be_written, 0, properties.size());
for (int j = properties.size(); j < to_be_written.length; j++)
{

// create an instance of an anonymous inner class that
// extends Property
to_be_written[ j ] = new Property()
{
protected void preWrite()
{
}

public boolean isDirectory()
{
return false;
}
};
}
BlockWritable[] rvalue = new BlockWritable[ block_count ];

for (int j = 0; j < block_count; j++)
{
rvalue[ j ] = new PropertyBlock(bigBlockSize, to_be_written,
j * _properties_per_block);
}
return rvalue;
}

/* ********** START extension of BigBlock ********** */

/**
* Write the block's data to an OutputStream
*
* @param stream the OutputStream to which the stored data should
* be written
*
* @exception IOException on problems writing to the specified
* stream
*/

void writeData(final OutputStream stream)
throws IOException
{
int _properties_per_block = bigBlockSize.getPropertiesPerBlock();
for (int j = 0; j < _properties_per_block; j++)
{
_properties[ j ].writeData(stream);
}
}

/* ********** END extension of BigBlock ********** */
} // end public class PropertyBlock
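createPropertyBlockArray() above pads the property list up to a whole number of blocks with empty filler properties. A small generic sketch of that pad-to-multiple step; the 4-properties-per-512-byte-block figure is an assumption for the example:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class PadToBlockSketch {
        // Pad a list up to the next multiple of perBlock with a filler element,
        // mirroring the empty-Property padding done by createPropertyBlockArray()
        static <T> List<T> padToMultiple(List<T> items, int perBlock, T filler) {
            int blocks = (items.size() + perBlock - 1) / perBlock;        // ceiling division
            List<T> padded = new ArrayList<>(items);
            padded.addAll(Collections.nCopies(blocks * perBlock - items.size(), filler));
            return padded;
        }

        public static void main(String[] args) {
            List<String> props = Arrays.asList("Root Entry", "Workbook", "SummaryInformation");
            System.out.println(padToMultiple(props, 4, "<empty>"));       // 3 real entries + 1 filler
        }
    }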


+ 0
- 151
src/java/org/apache/poi/poifs/storage/RawDataBlock.java View File

@@ -1,151 +0,0 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import org.apache.poi.poifs.common.POIFSConstants;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

import java.io.*;

/**
* A big block created from an InputStream, holding the raw data
*
* @author Marc Johnson (mjohnson at apache dot org)
*/

public class RawDataBlock
implements ListManagedBlock
{
//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 100_000;

private byte[] _data;
private boolean _eof;
private boolean _hasData;
static POILogger log = POILogFactory.getLogger(RawDataBlock.class);

/**
* Constructor RawDataBlock
*
* @param stream the InputStream from which the data will be read
*
* @exception IOException on I/O errors, and if an insufficient
* amount of data is read (the InputStream must
* be an exact multiple of the block size)
*/
public RawDataBlock(final InputStream stream)
throws IOException {
this(stream, POIFSConstants.SMALLER_BIG_BLOCK_SIZE);
}
/**
* Constructor RawDataBlock
*
* @param stream the InputStream from which the data will be read
* @param blockSize the size of the POIFS blocks, normally 512 bytes
* {@link org.apache.poi.poifs.common.POIFSConstants#SMALLER_BIG_BLOCK_SIZE}
*
* @exception IOException on I/O errors, and if an insufficient
* amount of data is read (the InputStream must
* be an exact multiple of the block size)
*/
public RawDataBlock(final InputStream stream, int blockSize)
throws IOException {
_data = IOUtils.safelyAllocate(blockSize, MAX_RECORD_LENGTH);
int count = IOUtils.readFully(stream, _data);
_hasData = (count > 0);

if (count == -1) {
_eof = true;
}
else if (count != blockSize) {
// IOUtils.readFully will always read the
// requested number of bytes, unless it hits
// an EOF
_eof = true;
String type = " byte" + ((count == 1) ? ("")
: ("s"));

log.log(POILogger.ERROR,
"Unable to read entire block; " + count
+ type + " read before EOF; expected "
+ blockSize + " bytes. Your document "
+ "was either written by software that "
+ "ignores the spec, or has been truncated!"
);
}
else {
_eof = false;
}
}

/**
* When we read the data, did we hit end of file?
*
* @return true if the EoF was hit during this block, or
* false if not. If you have a dodgy short last block, then
* it's possible to both have data, and also hit EoF...
*/
public boolean eof() {
return _eof;
}
/**
* Did we actually find any data to read? It's possible,
* in the event of a short last block, to both have hit
* the EoF, but also to have data
*/
public boolean hasData() {
return _hasData;
}
public String toString() {
return "RawDataBlock of size " + _data.length;
}

/* ********** START implementation of ListManagedBlock ********** */

/**
* Get the data from the block
*
* @return the block's data as a byte array
*
* @exception IOException if there is no data
*/
public byte [] getData()
throws IOException
{
if (! hasData())
{
throw new IOException("Cannot return empty data");
}
return _data;
}
/**
* What's the big block size?
*/
public int getBigBlockSize() {
return _data.length;
}

/* ********** END implementation of ListManagedBlock ********** */
} // end public class RawDataBlock


+ 0
- 70
src/java/org/apache/poi/poifs/storage/RawDataBlockList.java View File

@@ -1,70 +0,0 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.poifs.storage;

import java.io.*;

import java.util.*;

import org.apache.poi.poifs.common.POIFSBigBlockSize;

/**
* A list of RawDataBlocks instances, and methods to manage the list
*
* @author Marc Johnson (mjohnson at apache dot org)
*/

public class RawDataBlockList
extends BlockListImpl
{

/**
* Constructor RawDataBlockList
*
* @param stream the InputStream from which the data will be read
* @param bigBlockSize The big block size, either 512 bytes or 4096 bytes
*
* @exception IOException on I/O errors, and if an incomplete
* block is read
*/

public RawDataBlockList(final InputStream stream, POIFSBigBlockSize bigBlockSize)
throws IOException
{
List<RawDataBlock> blocks = new ArrayList<>();

while (true)
{
RawDataBlock block = new RawDataBlock(stream, bigBlockSize.getBigBlockSize());
// If there was data, add the block to the list
if(block.hasData()) {
blocks.add(block);
}

// If the stream is now at the End Of File, we're done
if (block.eof()) {
break;
}
}
setBlocks( blocks.toArray(new RawDataBlock[ blocks.size() ]) );
}
} // end public class RawDataBlockList
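Between them, RawDataBlock and RawDataBlockList implemented a read-fixed-size-chunks-until-EOF loop, keeping a short trailing chunk if one exists. A minimal stand-alone sketch of that loop in plain java.io, with 512 bytes assumed as the block size:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.ArrayList;
    import java.util.List;

    public class ReadBlocksSketch {
        static List<byte[]> readBlocks(InputStream in, int blockSize) throws IOException {
            List<byte[]> blocks = new ArrayList<>();
            while (true) {
                byte[] block = new byte[blockSize];
                int total = 0;
                // fill the block, or stop early at end of stream
                while (total < blockSize) {
                    int n = in.read(block, total, blockSize - total);
                    if (n < 0) {
                        break;
                    }
                    total += n;
                }
                if (total > 0) {
                    blocks.add(block);   // a short last block is kept (zero padded here)
                }
                if (total < blockSize) {
                    return blocks;       // EOF reached
                }
            }
        }

        public static void main(String[] args) throws IOException {
            byte[] data = new byte[1300];
            System.out.println(readBlocks(new ByteArrayInputStream(data), 512).size());  // 3
        }
    }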


+ 8
- 8
src/java/org/apache/poi/sl/usermodel/SlideShowFactory.java View File

@@ -30,7 +30,7 @@ import org.apache.poi.poifs.crypt.Decryptor;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentFactoryHelper;
import org.apache.poi.poifs.filesystem.FileMagic;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.util.IOUtils;

@@ -38,7 +38,7 @@ public class SlideShowFactory {
/**
* Creates a SlideShow from the given NPOIFSFileSystem.
*
* @param fs The {@link NPOIFSFileSystem} to read the document from
* @param fs The {@link POIFSFileSystem} to read the document from
*
* @return The created SlideShow
*
@@ -47,7 +47,7 @@ public class SlideShowFactory {
public static <
S extends Shape<S,P>,
P extends TextParagraph<S,P,? extends TextRun>
> SlideShow<S,P> create(NPOIFSFileSystem fs) throws IOException {
> SlideShow<S,P> create(POIFSFileSystem fs) throws IOException {
return create(fs, null);
}

@@ -55,7 +55,7 @@ public class SlideShowFactory {
* Creates a SlideShow from the given NPOIFSFileSystem, which may
* be password protected
*
* @param fs The {@link NPOIFSFileSystem} to read the document from
* @param fs The {@link POIFSFileSystem} to read the document from
* @param password The password that should be used or null if no password is necessary.
*
* @return The created SlideShow
@@ -65,7 +65,7 @@ public class SlideShowFactory {
public static <
S extends Shape<S,P>,
P extends TextParagraph<S,P,? extends TextRun>
> SlideShow<S,P> create(final NPOIFSFileSystem fs, String password) throws IOException {
> SlideShow<S,P> create(final POIFSFileSystem fs, String password) throws IOException {
return create(fs.getRoot(), password);
}

@@ -188,7 +188,7 @@ public class SlideShowFactory {
switch (fm) {
case OLE2:
NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
POIFSFileSystem fs = new POIFSFileSystem(is);
return create(fs, password);
case OOXML:
return createXSLFSlideShow(is);
@@ -264,9 +264,9 @@ public class SlideShowFactory {
throw new FileNotFoundException(file.toString());
}

NPOIFSFileSystem fs = null;
POIFSFileSystem fs = null;
try {
fs = new NPOIFSFileSystem(file, readOnly);
fs = new POIFSFileSystem(file, readOnly);
return create(fs, password);
} catch(OfficeXmlFileException e) {
IOUtils.closeQuietly(fs);
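After this rename, the OLE2 path of SlideShowFactory takes a plain POIFSFileSystem. A hedged usage sketch of the resulting API; "deck.ppt" is a placeholder path:

    import java.io.File;
    import java.io.IOException;

    import org.apache.poi.poifs.filesystem.POIFSFileSystem;
    import org.apache.poi.sl.usermodel.SlideShow;
    import org.apache.poi.sl.usermodel.SlideShowFactory;

    public class OpenSlideShowSketch {
        public static void main(String[] args) throws IOException {
            // An OLE2 .ppt goes through the POIFS branch shown above
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("deck.ppt"), true)) {
                SlideShow<?, ?> show = SlideShowFactory.create(fs);
                System.out.println("Slides: " + show.getSlides().size());
                show.close();
            }
        }
    }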

+ 8
- 8
src/java/org/apache/poi/ss/usermodel/WorkbookFactory.java View File

@@ -32,7 +32,7 @@ import org.apache.poi.poifs.crypt.Decryptor;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentFactoryHelper;
import org.apache.poi.poifs.filesystem.FileMagic;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Removal;
@@ -49,13 +49,13 @@ public class WorkbookFactory {
* Note that in order to properly release resources the
* Workbook should be closed after use.
*
* @param fs The {@link NPOIFSFileSystem} to read the document from
* @param fs The {@link POIFSFileSystem} to read the document from
*
* @return The created workbook
*
* @throws IOException if an error occurs while reading the data
*/
public static Workbook create(NPOIFSFileSystem fs) throws IOException {
public static Workbook create(POIFSFileSystem fs) throws IOException {
return create(fs, null);
}

@@ -63,14 +63,14 @@ public class WorkbookFactory {
* Creates a Workbook from the given NPOIFSFileSystem, which may
* be password protected
*
* @param fs The {@link NPOIFSFileSystem} to read the document from
* @param fs The {@link POIFSFileSystem} to read the document from
* @param password The password that should be used or null if no password is necessary.
*
* @return The created Workbook
*
* @throws IOException if an error occurs while reading the data
*/
private static Workbook create(final NPOIFSFileSystem fs, String password) throws IOException {
private static Workbook create(final POIFSFileSystem fs, String password) throws IOException {
return create(fs.getRoot(), password);
}

@@ -208,7 +208,7 @@ public class WorkbookFactory {

switch (fm) {
case OLE2:
NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
POIFSFileSystem fs = new POIFSFileSystem(is);
return create(fs, password);
case OOXML:
return createXSSFWorkbook(is);
@@ -275,9 +275,9 @@ public class WorkbookFactory {
throw new FileNotFoundException(file.toString());
}

NPOIFSFileSystem fs = null;
POIFSFileSystem fs = null;
try {
fs = new NPOIFSFileSystem(file, readOnly);
fs = new POIFSFileSystem(file, readOnly);
return create(fs, password);
} catch(OfficeXmlFileException e) {
IOUtils.closeQuietly(fs);
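The WorkbookFactory changes mirror the SlideShowFactory ones: the OLE2 entry points now take POIFSFileSystem directly. A short usage sketch under the same assumptions; "report.xls" is a placeholder path:

    import java.io.File;
    import java.io.IOException;

    import org.apache.poi.poifs.filesystem.POIFSFileSystem;
    import org.apache.poi.ss.usermodel.Workbook;
    import org.apache.poi.ss.usermodel.WorkbookFactory;

    public class OpenWorkbookSketch {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("report.xls"), true);
                 Workbook wb = WorkbookFactory.create(fs)) {
                System.out.println("Sheets: " + wb.getNumberOfSheets());
            }
        }
    }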

+ 6
- 4
src/java/org/apache/poi/util/DrawingDump.java View File

@@ -26,18 +26,20 @@ import java.nio.charset.Charset;

import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.Sheet;

/**
* Dump out the aggregated escher records
*/
public class DrawingDump
{
public final class DrawingDump {
private DrawingDump() {
}

public static void main( String[] args ) throws IOException {
OutputStreamWriter osw = new OutputStreamWriter(System.out, Charset.defaultCharset());
PrintWriter pw = new PrintWriter(osw);
NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(args[0]));
POIFSFileSystem fs = new POIFSFileSystem(new File(args[0]));
HSSFWorkbook wb = new HSSFWorkbook(fs);
try {
pw.println( "Drawing group:" );

+ 5
- 9
src/ooxml/java/org/apache/poi/ooxml/extractor/ExtractorFactory.java View File

@@ -46,10 +46,9 @@ import org.apache.poi.poifs.filesystem.DirectoryEntry;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.FileMagic;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.NotOLE2FileException;
import org.apache.poi.poifs.filesystem.OfficeXmlFileException;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.NotImplemented;
@@ -132,9 +131,9 @@ public final class ExtractorFactory {

@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(File f) throws IOException, OpenXML4JException, XmlException {
NPOIFSFileSystem fs = null;
POIFSFileSystem fs = null;
try {
fs = new NPOIFSFileSystem(f);
fs = new POIFSFileSystem(f);
if (fs.getRoot().hasEntry(Decryptor.DEFAULT_POIFS_ENTRY)) {
return (T)createEncryptedOOXMLExtractor(fs);
}
@@ -166,7 +165,7 @@ public final class ExtractorFactory {
switch (fm) {
case OLE2:
NPOIFSFileSystem fs = new NPOIFSFileSystem(is);
POIFSFileSystem fs = new POIFSFileSystem(is);
boolean isEncrypted = fs.getRoot().hasEntry(Decryptor.DEFAULT_POIFS_ENTRY);
return isEncrypted ? createEncryptedOOXMLExtractor(fs) : createExtractor(fs);
case OOXML:
@@ -262,9 +261,6 @@ public final class ExtractorFactory {
public static <T extends POITextExtractor> T createExtractor(POIFSFileSystem fs) throws IOException, OpenXML4JException, XmlException {
return createExtractor(fs.getRoot());
}
public static <T extends POITextExtractor> T createExtractor(NPOIFSFileSystem fs) throws IOException, OpenXML4JException, XmlException {
return createExtractor(fs.getRoot());
}

@SuppressWarnings("unchecked")
public static <T extends POITextExtractor> T createExtractor(DirectoryNode poifsDir) throws IOException, OpenXML4JException, XmlException
@@ -408,7 +404,7 @@ public final class ExtractorFactory {
throw new IllegalStateException("Not yet supported");
}
private static POITextExtractor createEncryptedOOXMLExtractor(NPOIFSFileSystem fs)
private static POITextExtractor createEncryptedOOXMLExtractor(POIFSFileSystem fs)
throws IOException {
String pass = Biff8EncryptionKey.getCurrentUserPassword();
if (pass == null) {
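The encryption probe used above, checking the root directory for Decryptor.DEFAULT_POIFS_ENTRY before routing to the encrypted-OOXML extractor, can be reproduced on its own. A minimal sketch, with "sample.doc" as a placeholder path:

    import java.io.File;
    import java.io.IOException;

    import org.apache.poi.poifs.crypt.Decryptor;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class EncryptionProbeSketch {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("sample.doc"), true)) {
                // Same check the factory performs on the OLE2 root directory
                boolean encryptedOoxml = fs.getRoot().hasEntry(Decryptor.DEFAULT_POIFS_ENTRY);
                System.out.println("Encrypted OOXML payload present: " + encryptedOoxml);
            }
        }
    }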

+ 1
- 1
src/ooxml/java/org/apache/poi/xssf/usermodel/XSSFCellStyle.java View File

@@ -1002,7 +1002,7 @@ public class XSSFCellStyle implements CellStyle {
@Override
public void setFont(Font font) {
if(font != null){
long index = font.getIndex();
long index = font.getIndexAsInt();
this._cellXf.setFontId(index);
this._cellXf.setApplyFont(true);
} else {

+ 1
- 2
src/ooxml/testcases/org/apache/poi/poifs/crypt/TestDecryptor.java View File

@@ -35,7 +35,6 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.poi.POIDataSamples;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.junit.Assume;
@@ -134,7 +133,7 @@ public class TestDecryptor {
// the fix limits the available size and tries to read all entries
File f = samples.getFile("extenxls_pwd123.xlsx");

try (NPOIFSFileSystem fs = new NPOIFSFileSystem(f, true)) {
try (POIFSFileSystem fs = new POIFSFileSystem(f, true)) {
EncryptionInfo info = new EncryptionInfo(fs);
Decryptor d = Decryptor.getInstance(info);
d.verifyPassword("pwd123");

+ 10
- 11
src/ooxml/testcases/org/apache/poi/poifs/crypt/TestEncryptor.java View File

@@ -37,7 +37,6 @@ import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.TempFile;
@@ -102,7 +101,7 @@ public class TestEncryptor {
final EncryptionInfo infoExpected;
final Decryptor decExpected;

try (NPOIFSFileSystem nfs = new NPOIFSFileSystem(file, true)) {
try (POIFSFileSystem nfs = new POIFSFileSystem(file, true)) {

// Check the encryption details
infoExpected = new EncryptionInfo(nfs);
@@ -159,7 +158,7 @@ public class TestEncryptor {
final EncryptionInfo infoActual2;
final byte[] payloadActual, encPackActual;
final long decPackLenActual;
try (NPOIFSFileSystem nfs = new NPOIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()))) {
try (POIFSFileSystem nfs = new POIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()))) {
infoActual2 = new EncryptionInfo(nfs.getRoot());
Decryptor decActual = Decryptor.getInstance(infoActual2);
boolean passed = decActual.verifyPassword(pass);
@@ -196,7 +195,7 @@ public class TestEncryptor {
final byte[] payloadExpected;
final EncryptionInfo infoExpected;
final Decryptor d;
try (NPOIFSFileSystem nfs = new NPOIFSFileSystem(file, true)) {
try (POIFSFileSystem nfs = new POIFSFileSystem(file, true)) {

// Check the encryption details
infoExpected = new EncryptionInfo(nfs);
@@ -260,7 +259,7 @@ public class TestEncryptor {
}

final byte[] payloadActual;
try (NPOIFSFileSystem nfs = new NPOIFSFileSystem(new ByteArrayInputStream(encBytes))) {
try (POIFSFileSystem nfs = new POIFSFileSystem(new ByteArrayInputStream(encBytes))) {
final EncryptionInfo ei = new EncryptionInfo(nfs);
Decryptor d2 = Decryptor.getInstance(ei);
assertTrue("Unable to process: document is encrypted", d2.verifyPassword(pass));
@@ -297,7 +296,7 @@ public class TestEncryptor {
Encryptor enc = info.getEncryptor();
enc.confirmPassword("password");

try (NPOIFSFileSystem fs = new NPOIFSFileSystem()) {
try (POIFSFileSystem fs = new POIFSFileSystem()) {

try (OutputStream os = enc.getDataStream(fs)) {
pkg.save(os);
@@ -311,11 +310,11 @@ public class TestEncryptor {
}

try (NPOIFSFileSystem inpFS = new NPOIFSFileSystem(new ByteArrayInputStream(encBytes))) {
try (POIFSFileSystem inpFS = new POIFSFileSystem(new ByteArrayInputStream(encBytes))) {
// Check we can decrypt it
EncryptionInfo info = new EncryptionInfo(inpFS);
Decryptor d = Decryptor.getInstance(info);
assertEquals(true, d.verifyPassword("password"));
assertTrue(d.verifyPassword("password"));

try (OPCPackage inpPkg = OPCPackage.open(d.getDataStream(inpFS))) {
// Check it now has empty core properties
@@ -338,7 +337,7 @@ public class TestEncryptor {
IOUtils.copy(fis, fos);
}
try (NPOIFSFileSystem fs = new NPOIFSFileSystem(f, false)) {
try (POIFSFileSystem fs = new POIFSFileSystem(f, false)) {

// decrypt the protected file - in this case it was encrypted with the default password
EncryptionInfo encInfo = new EncryptionInfo(fs);
@@ -480,7 +479,7 @@ public class TestEncryptor {

final byte[] epNewBytes;
final EncryptionInfo infoReload;
try (NPOIFSFileSystem fsNew = new NPOIFSFileSystem()) {
try (POIFSFileSystem fsNew = new POIFSFileSystem()) {
try (OutputStream os = enc.getDataStream(fsNew)) {
os.write(zipInput);
}
@@ -488,7 +487,7 @@ public class TestEncryptor {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
fsNew.writeFilesystem(bos);

try (NPOIFSFileSystem fsReload = new NPOIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()))) {
try (POIFSFileSystem fsReload = new POIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()))) {
infoReload = new EncryptionInfo(fsReload);
try (InputStream epReload = fsReload.getRoot().createDocumentInputStream("EncryptedPackage")) {
epNewBytes = IOUtils.toByteArray(epReload, 9400);

+ 16
- 23
src/ooxml/testcases/org/apache/poi/xslf/usermodel/TestXSLFSlideShowFactory.java View File

@@ -28,7 +28,7 @@ import org.apache.poi.POIDataSamples;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.crypt.EncryptionMode;
import org.apache.poi.poifs.crypt.Encryptor;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.usermodel.BaseTestSlideShowFactory;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.TempFile;
@@ -93,27 +93,20 @@ public final class TestXSLFSlideShowFactory extends BaseTestSlideShowFactory {
}

private static File createProtected() throws IOException, GeneralSecurityException {
return createProtected(filename, password);
}

private static File createProtected(String basefile, String password)
throws IOException, GeneralSecurityException {
NPOIFSFileSystem fs = new NPOIFSFileSystem();
EncryptionInfo info = new EncryptionInfo(EncryptionMode.agile);
Encryptor enc = info.getEncryptor();
enc.confirmPassword(password);
InputStream fis = _slTests.openResourceAsStream(basefile);
OutputStream os = enc.getDataStream(fs);
IOUtils.copy(fis, os);
os.close();
fis.close();
File tf = TempFile.createTempFile("test-xslf-slidefactory", ".pptx");
FileOutputStream fos = new FileOutputStream(tf);
fs.writeFilesystem(fos);
fos.close();
fs.close();

return tf;
try (POIFSFileSystem fs = new POIFSFileSystem()) {
EncryptionInfo info = new EncryptionInfo(EncryptionMode.agile);
Encryptor enc = info.getEncryptor();
enc.confirmPassword(password);
try (InputStream fis = _slTests.openResourceAsStream(filename);
OutputStream os = enc.getDataStream(fs)) {
IOUtils.copy(fis, os);
}

File tf = TempFile.createTempFile("test-xslf-slidefactory", ".pptx");
try (FileOutputStream fos = new FileOutputStream(tf)) {
fs.writeFilesystem(fos);
}
return tf;
}
}
}
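The rewritten test above is the general agile-encryption round trip with the renamed POIFSFileSystem. A self-contained sketch of that round trip on an in-memory payload; the password and payload bytes are made up for illustration:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.poi.poifs.crypt.Decryptor;
    import org.apache.poi.poifs.crypt.EncryptionInfo;
    import org.apache.poi.poifs.crypt.EncryptionMode;
    import org.apache.poi.poifs.crypt.Encryptor;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;
    import org.apache.poi.util.IOUtils;

    public class EncryptRoundTripSketch {
        public static void main(String[] args) throws Exception {
            byte[] payload = "not really an OOXML package".getBytes(StandardCharsets.UTF_8);
            ByteArrayOutputStream container = new ByteArrayOutputStream();

            // Encrypt the payload into a fresh in-memory POIFSFileSystem
            try (POIFSFileSystem fs = new POIFSFileSystem()) {
                EncryptionInfo info = new EncryptionInfo(EncryptionMode.agile);
                Encryptor enc = info.getEncryptor();
                enc.confirmPassword("secret");
                try (OutputStream os = enc.getDataStream(fs)) {
                    os.write(payload);
                }
                fs.writeFilesystem(container);
            }

            // Reload the container and decrypt it again
            try (POIFSFileSystem fs = new POIFSFileSystem(new ByteArrayInputStream(container.toByteArray()))) {
                EncryptionInfo info = new EncryptionInfo(fs);
                Decryptor dec = Decryptor.getInstance(info);
                if (!dec.verifyPassword("secret")) {
                    throw new IllegalStateException("wrong password");
                }
                try (InputStream is = dec.getDataStream(fs)) {
                    System.out.println(new String(IOUtils.toByteArray(is), StandardCharsets.UTF_8));
                }
            }
        }
    }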

+ 20
- 23
src/ooxml/testcases/org/apache/poi/xssf/usermodel/TestXSSFBugs.java View File

@@ -67,7 +67,6 @@ import org.apache.poi.openxml4j.opc.PackagePart;
import org.apache.poi.openxml4j.opc.PackageRelationship;
import org.apache.poi.openxml4j.opc.PackagingURIHelper;
import org.apache.poi.openxml4j.util.ZipSecureFile;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.ITestDataProvider;
import org.apache.poi.ss.SpreadsheetVersion;
@@ -1004,10 +1003,10 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {


// Add some more tables, and check
t = s2.createTable();
t = s2.createTable(null);
t.setName("New 2");
t.setDisplayName("New 2");
t = s3.createTable();
t = s3.createTable(null);
t.setName("New 3");
t.setDisplayName("New 3");

@@ -1478,7 +1477,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
}

@Test
public void bug55692_stream() throws IOException, InvalidFormatException {
public void bug55692_stream() throws IOException {
// Directly on a Stream, will go via NPOIFS and spot it's
// actually a .xlsx file encrypted with the default password, and open
Workbook wb = WorkbookFactory.create(
@@ -1492,7 +1491,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
public void bug55692_npoifs() throws IOException {
// Via a NPOIFSFileSystem, will spot it's actually a .xlsx file
// encrypted with the default password, and open
NPOIFSFileSystem fsNP = new NPOIFSFileSystem(
POIFSFileSystem fsNP = new POIFSFileSystem(
POIDataSamples.getPOIFSInstance().openResourceAsStream("protect.xlsx"));
Workbook wb = WorkbookFactory.create(fsNP);
assertNotNull(wb);
@@ -1972,7 +1971,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
* error message if given one
*/
@Test
public void bug56800_xlsb() throws IOException, InvalidFormatException {
public void bug56800_xlsb() throws IOException {
// Can be opened at the OPC level
OPCPackage pkg = XSSFTestDataSamples.openSamplePackage("Simple.xlsb");

@@ -2519,7 +2518,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
}

private void runTest56574(boolean createRow) throws IOException {
Workbook wb = XSSFTestDataSamples.openSampleWorkbook("56574.xlsx");
XSSFWorkbook wb = XSSFTestDataSamples.openSampleWorkbook("56574.xlsx");

Sheet sheet = wb.getSheet("Func");
assertNotNull(sheet);
@@ -2562,17 +2561,17 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
}
}

XSSFFormulaEvaluator.evaluateAllFormulaCells((XSSFWorkbook) wb);
XSSFFormulaEvaluator.evaluateAllFormulaCells(wb);
wb.getCreationHelper().createFormulaEvaluator().evaluateAll();

CalculationChain chain = ((XSSFWorkbook) wb).getCalculationChain();
CalculationChain chain = wb.getCalculationChain();
checkCellsAreGone(chain);

Workbook wbBack = XSSFTestDataSamples.writeOutAndReadBack(wb);
XSSFWorkbook wbBack = XSSFTestDataSamples.writeOutAndReadBack(wb);
Sheet sheetBack = wbBack.getSheet("Func");
assertNotNull(sheetBack);

chain = ((XSSFWorkbook) wbBack).getCalculationChain();
chain = wbBack.getCalculationChain();
checkCellsAreGone(chain);

wbBack.close();
@@ -2653,7 +2652,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
}

@Test
public void test51626() throws IOException, InvalidFormatException {
public void test51626() throws IOException {
Workbook wb = XSSFTestDataSamples.openSampleWorkbook("51626.xlsx");
assertNotNull(wb);
wb.close();
@@ -3204,7 +3203,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
final String initialFormula = "A1";
final String expectedFormula = "#REF!"; // from ms excel

Workbook wb = new XSSFWorkbook();
XSSFWorkbook wb = new XSSFWorkbook();
Sheet sheet = wb.createSheet("sheet1");
sheet.createRow(0).createCell(0).setCellValue(1); // A1 = 1

@@ -3219,7 +3218,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
{
FormulaShifter formulaShifter = FormulaShifter.createForRowCopy(0, "sheet1", 2/*firstRowToShift*/, 2/*lastRowToShift*/
, -1/*step*/, SpreadsheetVersion.EXCEL2007); // parameters 2, 2, -1 should mean : move row range [2-2] one level up
XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create((XSSFWorkbook) wb);
XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
Ptg[] ptgs = FormulaParser.parse(initialFormula, fpb, FormulaType.CELL, 0); // [A1]
formulaShifter.adjustFormula(ptgs, 0); // adjusted to [A]
String shiftedFmla = FormulaRenderer.toFormulaString(fpb, ptgs); //A
@@ -3231,7 +3230,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
{
FormulaShifter formulaShifter = FormulaShifter.createForRowShift(0, "sheet1", 2/*firstRowToShift*/, 2/*lastRowToShift*/
, -1/*step*/, SpreadsheetVersion.EXCEL2007); // parameters 2, 2, -1 should mean : move row range [2-2] one level up
XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create((XSSFWorkbook) wb);
XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
Ptg[] ptgs = FormulaParser.parse(initialFormula, fpb, FormulaType.CELL, 0); // [A1]
formulaShifter.adjustFormula(ptgs, 0); // adjusted to [A]
String shiftedFmla = FormulaRenderer.toFormulaString(fpb, ptgs); //A
@@ -3276,18 +3275,18 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
XSSFWorkbook wb = new XSSFWorkbook();

XSSFSheet sheet = wb.createSheet();
XSSFTable table1 = sheet.createTable();
XSSFTable table2 = sheet.createTable();
XSSFTable table3 = sheet.createTable();
XSSFTable table1 = sheet.createTable(null);
XSSFTable table2 = sheet.createTable(null);
XSSFTable table3 = sheet.createTable(null);

sheet.removeTable(table1);

sheet.createTable();
sheet.createTable(null);

sheet.removeTable(table2);
sheet.removeTable(table3);

sheet.createTable();
sheet.createTable(null);

wb.close();
}
@@ -3295,7 +3294,6 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
/**
* Auto column sizing failed when there were loads of fonts with
* errors like ArrayIndexOutOfBoundsException: -32765
* TODO Get this to actually reproduce the bug...
*/
@Test
public void test62108() {
@@ -3309,6 +3307,7 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
for (int i=0; i<fonts.length; i++) {
XSSFFont font = wb.createFont();
font.setFontHeight(i);
fonts[i] = font;
}
// Create a moderate number of columns, which use
@@ -3356,7 +3355,5 @@ public final class TestXSSFBugs extends BaseTestBugzillaIssues {
sheet = wbBack.getSheetAt(0);
assertEquals("E11", sheet.getActiveCell().formatAsString());
wbBack.close();

//wb.write(new FileOutputStream("c:/temp/61905." + instance.getStandardFileNameExtension()));
}
}

+ 3
- 3
src/ooxml/testcases/org/apache/poi/xwpf/TestXWPFBugs.java View File

@@ -34,7 +34,7 @@ import org.apache.poi.poifs.crypt.CipherAlgorithm;
import org.apache.poi.poifs.crypt.Decryptor;
import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.crypt.HashAlgorithm;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.xwpf.extractor.XWPFWordExtractor;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
import org.apache.xmlbeans.XmlException;
@@ -50,7 +50,7 @@ public class TestXWPFBugs {
@Test
public void bug53475NoCSPName() throws Exception {
File file = POIDataSamples.getDocumentInstance().getFile("bug53475-password-is-solrcell.docx");
NPOIFSFileSystem filesystem = new NPOIFSFileSystem(file, true);
POIFSFileSystem filesystem = new POIFSFileSystem(file, true);

// Check the encryption details
EncryptionInfo info = new EncryptionInfo(filesystem);
@@ -85,7 +85,7 @@ public class TestXWPFBugs {
Assume.assumeTrue("Please install JCE Unlimited Strength Jurisdiction Policy files for AES 256", maxKeyLen == 2147483647);

File file = POIDataSamples.getDocumentInstance().getFile("bug53475-password-is-pass.docx");
NPOIFSFileSystem filesystem = new NPOIFSFileSystem(file, true);
POIFSFileSystem filesystem = new POIFSFileSystem(file, true);

// Check the encryption details
EncryptionInfo info = new EncryptionInfo(filesystem);

+ 8
- 17
src/scratchpad/src/org/apache/poi/hdgf/HDGFDiagram.java View File

@@ -17,7 +17,6 @@

package org.apache.poi.hdgf;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;

@@ -30,7 +29,6 @@ import org.apache.poi.hdgf.streams.Stream;
import org.apache.poi.hdgf.streams.StringsStream;
import org.apache.poi.hdgf.streams.TrailerStream;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
@@ -46,30 +44,23 @@ import org.apache.poi.util.LocaleUtil;
public final class HDGFDiagram extends POIReadOnlyDocument {
private static final String VISIO_HEADER = "Visio (TM) Drawing\r\n";

private byte[] _docstream;

private short version;
private long docSize;

private Pointer trailerPointer;
private TrailerStream trailer;

private ChunkFactory chunkFactory;
private PointerFactory ptrFactory;

public HDGFDiagram(POIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
public HDGFDiagram(NPOIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}

public HDGFDiagram(DirectoryNode dir) throws IOException {
super(dir);

// Grab the document stream
InputStream is = dir.createDocumentInputStream("VisioDocument");
_docstream = IOUtils.toByteArray(is);
is.close();
final byte[] _docstream;
try (InputStream is = dir.createDocumentInputStream("VisioDocument")) {
_docstream = IOUtils.toByteArray(is);
}

// Check it's really visio
String typeString = new String(_docstream, 0, 20, LocaleUtil.CHARSET_1252 );
@@ -78,14 +69,14 @@ public final class HDGFDiagram extends POIReadOnlyDocument {
}

// Grab the version number, 0x1a -> 0x1b
version = LittleEndian.getShort(_docstream, 0x1a);
short version = LittleEndian.getShort(_docstream, 0x1a);
// Grab the document size, 0x1c -> 0x1f
docSize = LittleEndian.getUInt(_docstream, 0x1c);
// ??? 0x20 -> 0x23

// Create the Chunk+Pointer Factories for the document version
ptrFactory = new PointerFactory(version);
chunkFactory = new ChunkFactory(version);
PointerFactory ptrFactory = new PointerFactory(version);
ChunkFactory chunkFactory = new ChunkFactory(version);

// Grab the pointer to the trailer
trailerPointer = ptrFactory.createPointer(_docstream, 0x24);
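The header parsing shown here relies on LittleEndian.getShort()/getUInt() to pull fixed-offset fields out of the raw stream. A tiny sketch of those helpers on a fabricated buffer:

    import org.apache.poi.util.LittleEndian;

    public class LittleEndianSketch {
        public static void main(String[] args) {
            // Fabricated buffer: 0x0B 0x00 reads as short 11, the full 4 bytes as unsigned int 0x012C000B
            byte[] buf = { 0x0B, 0x00, 0x2C, 0x01 };
            short version = LittleEndian.getShort(buf, 0);
            long size = LittleEndian.getUInt(buf, 0);
            System.out.println(version + " / " + size);   // 11 / 19660811
        }
    }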

+ 2
- 2
src/scratchpad/src/org/apache/poi/hdgf/dev/VSDDumper.java View File

@@ -28,7 +28,7 @@ import org.apache.poi.hdgf.pointers.Pointer;
import org.apache.poi.hdgf.streams.ChunkStream;
import org.apache.poi.hdgf.streams.PointerContainingStream;
import org.apache.poi.hdgf.streams.Stream;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
* Developer helper class to dump out the pointer+stream structure
@@ -51,7 +51,7 @@ public final class VSDDumper {
System.exit(1);
}

NPOIFSFileSystem poifs = new NPOIFSFileSystem(new File(args[0]));
POIFSFileSystem poifs = new POIFSFileSystem(new File(args[0]));
try {
HDGFDiagram hdgf = new HDGFDiagram(poifs);


+ 5
- 7
src/scratchpad/src/org/apache/poi/hdgf/extractor/VisioTextExtractor.java View File

@@ -31,7 +31,6 @@ import org.apache.poi.hdgf.streams.ChunkStream;
import org.apache.poi.hdgf.streams.PointerContainingStream;
import org.apache.poi.hdgf.streams.Stream;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
@@ -49,14 +48,13 @@ public final class VisioTextExtractor extends POIOLE2TextExtractor {
public VisioTextExtractor(POIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
public VisioTextExtractor(NPOIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}

public VisioTextExtractor(DirectoryNode dir) throws IOException {
this(new HDGFDiagram(dir));
}

public VisioTextExtractor(InputStream inp) throws IOException {
this(new NPOIFSFileSystem(inp));
this(new POIFSFileSystem(inp));
}

/**
@@ -70,7 +68,7 @@ public final class VisioTextExtractor extends POIOLE2TextExtractor {
for(Stream stream : hdgf.getTopLevelStreams()) {
findText(stream, text);
}
return text.toArray( new String[text.size()] );
return text.toArray(new String[0]);
}
private void findText(Stream stream, List<String> text) {
if(stream instanceof PointerContainingStream) {
@@ -113,7 +111,7 @@ public final class VisioTextExtractor extends POIOLE2TextExtractor {
*/
@Override
public String getText() {
StringBuffer text = new StringBuffer();
StringBuilder text = new StringBuilder();
for(String t : getAllText()) {
text.append(t);
if(!t.endsWith("\r") && !t.endsWith("\n")) {

+ 2
- 5
src/scratchpad/src/org/apache/poi/hpbf/HPBFDocument.java View File

@@ -26,7 +26,6 @@ import org.apache.poi.hpbf.model.EscherStm;
import org.apache.poi.hpbf.model.MainContents;
import org.apache.poi.hpbf.model.QuillContents;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
@@ -46,11 +45,9 @@ public final class HPBFDocument extends POIReadOnlyDocument {
public HPBFDocument(POIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
public HPBFDocument(NPOIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}

public HPBFDocument(InputStream inp) throws IOException {
this(new NPOIFSFileSystem(inp));
this(new POIFSFileSystem(inp));
}

/**

+ 5
- 5
src/scratchpad/src/org/apache/poi/hpbf/dev/HPBFDumper.java View File

@@ -24,7 +24,7 @@ import java.io.InputStream;
import org.apache.poi.ddf.DefaultEscherRecordFactory;
import org.apache.poi.ddf.EscherRecord;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LocaleUtil;
@@ -36,14 +36,14 @@ import org.apache.poi.util.StringUtil;
* constructed.
*/
public final class HPBFDumper {
private NPOIFSFileSystem fs;
public HPBFDumper(NPOIFSFileSystem fs) {
private POIFSFileSystem fs;
public HPBFDumper(POIFSFileSystem fs) {
this.fs = fs;
}
@SuppressWarnings("resource")
public HPBFDumper(InputStream inp) throws IOException {
this(new NPOIFSFileSystem(inp));
this(new POIFSFileSystem(inp));
}

private static byte[] getData(DirectoryNode dir, String name) throws IOException {
@@ -83,7 +83,7 @@ public final class HPBFDumper {
System.err.println(" HPBFDumper <filename>");
System.exit(1);
}
HPBFDumper dump = new HPBFDumper(new NPOIFSFileSystem(new File(args[0])));
HPBFDumper dump = new HPBFDumper(new POIFSFileSystem(new File(args[0])));

System.out.println("Dumping " + args[0]);
dump.dumpContents();

+ 3
- 3
src/scratchpad/src/org/apache/poi/hpbf/dev/PLCDumper.java View File

@@ -24,7 +24,7 @@ import java.io.InputStream;
import org.apache.poi.hpbf.HPBFDocument;
import org.apache.poi.hpbf.model.QuillContents;
import org.apache.poi.hpbf.model.qcbits.QCBit;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.HexDump;

/**
@@ -40,11 +40,11 @@ public final class PLCDumper {
doc = hpbfDoc;
qc = doc.getQuillContents();
}
public PLCDumper(NPOIFSFileSystem fs) throws IOException {
public PLCDumper(POIFSFileSystem fs) throws IOException {
this(new HPBFDocument(fs));
}
public PLCDumper(InputStream inp) throws IOException {
this(new NPOIFSFileSystem(inp));
this(new POIFSFileSystem(inp));
}

public static void main(String[] args) throws Exception {

+ 14
- 21
src/scratchpad/src/org/apache/poi/hpbf/extractor/PublisherTextExtractor.java View File

@@ -27,7 +27,6 @@ import org.apache.poi.hpbf.model.qcbits.QCBit;
import org.apache.poi.hpbf.model.qcbits.QCTextBit;
import org.apache.poi.hpbf.model.qcbits.QCPLCBit.Type12;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
@@ -47,9 +46,6 @@ public final class PublisherTextExtractor extends POIOLE2TextExtractor {
public PublisherTextExtractor(POIFSFileSystem fs) throws IOException {
this(new HPBFDocument(fs));
}
public PublisherTextExtractor(NPOIFSFileSystem fs) throws IOException {
this(new HPBFDocument(fs));
}
public PublisherTextExtractor(InputStream is) throws IOException {
this(new POIFSFileSystem(is));
}
@@ -69,10 +65,10 @@ public final class PublisherTextExtractor extends POIOLE2TextExtractor {

// Get the text from the Quill Contents
QCBit[] bits = doc.getQuillContents().getBits();
for(int i=0; i<bits.length; i++) {
if(bits[i] != null && bits[i] instanceof QCTextBit) {
QCTextBit t = (QCTextBit)bits[i];
text.append( t.getText().replace('\r', '\n') );
for (QCBit bit1 : bits) {
if (bit1 != null && bit1 instanceof QCTextBit) {
QCTextBit t = (QCTextBit) bit1;
text.append(t.getText().replace('\r', '\n'));
}
}

@@ -82,10 +78,10 @@ public final class PublisherTextExtractor extends POIOLE2TextExtractor {
// hyperlink is in, and we have yet to figure out
// how to tie that together.
if(hyperlinksByDefault) {
for(int i=0; i<bits.length; i++) {
if(bits[i] != null && bits[i] instanceof Type12) {
Type12 hyperlinks = (Type12)bits[i];
for(int j=0; j<hyperlinks.getNumberOfHyperlinks(); j++) {
for (QCBit bit : bits) {
if (bit != null && bit instanceof Type12) {
Type12 hyperlinks = (Type12) bit;
for (int j = 0; j < hyperlinks.getNumberOfHyperlinks(); j++) {
text.append("<");
text.append(hyperlinks.getHyperlink(j));
text.append(">\n");
@@ -107,15 +103,12 @@ public final class PublisherTextExtractor extends POIOLE2TextExtractor {
System.err.println(" PublisherTextExtractor <file.pub>");
}

for(int i=0; i<args.length; i++) {
FileInputStream fis = new FileInputStream(args[i]);
try {
PublisherTextExtractor te = new PublisherTextExtractor(fis);
System.out.println(te.getText());
te.close();
} finally {
fis.close();
}
for (String arg : args) {
try (FileInputStream fis = new FileInputStream(arg)) {
PublisherTextExtractor te = new PublisherTextExtractor(fis);
System.out.println(te.getText());
te.close();
}
}
}
}

+ 9
- 9
src/scratchpad/src/org/apache/poi/hslf/dev/PPTXMLDump.java View File

@@ -30,7 +30,7 @@ import java.nio.charset.StandardCharsets;
import org.apache.poi.hslf.record.RecordTypes;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;

@@ -54,7 +54,7 @@ public final class PPTXMLDump {
private boolean hexHeader = true;

public PPTXMLDump(File ppt) throws IOException {
NPOIFSFileSystem fs = new NPOIFSFileSystem(ppt, true);
POIFSFileSystem fs = new POIFSFileSystem(ppt, true);
try {
docstream = readEntry(fs, HSLFSlideShow.POWERPOINT_DOCUMENT);
pictstream = readEntry(fs, PICTURES_ENTRY);
@@ -63,7 +63,7 @@ public final class PPTXMLDump {
}
}

private static byte[] readEntry(NPOIFSFileSystem fs, String entry)
private static byte[] readEntry(POIFSFileSystem fs, String entry)
throws IOException {
DirectoryNode dn = fs.getRoot();
if (!dn.hasEntry(entry)) {
@@ -198,19 +198,19 @@ public final class PPTXMLDump {
return;
}
boolean outFile = false;
for (int i = 0; i < args.length; i++){
for (String arg : args) {

if (args[i].startsWith("-")) {
if ("-f".equals(args[i])){
if (arg.startsWith("-")) {
if ("-f".equals(arg)) {
//write output to a file
outFile = true;
}
} else {
File ppt = new File(args[i]);
File ppt = new File(arg);
PPTXMLDump dump = new PPTXMLDump(ppt);
System.out.println("Dumping " + args[i]);
System.out.println("Dumping " + arg);

if (outFile){
if (outFile) {
FileOutputStream fos = new FileOutputStream(ppt.getName() + ".xml");
OutputStreamWriter out = new OutputStreamWriter(fos, StandardCharsets.UTF_8);
dump.dump(out);

+ 3
- 3
src/scratchpad/src/org/apache/poi/hslf/dev/SlideShowDumper.java View File

@@ -30,7 +30,7 @@ import org.apache.poi.ddf.EscherTextboxRecord;
import org.apache.poi.hslf.record.HSLFEscherRecordFactory;
import org.apache.poi.hslf.record.RecordTypes;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
@@ -77,7 +77,7 @@ public final class SlideShowDumper {
filename = args[1];
}

NPOIFSFileSystem poifs = new NPOIFSFileSystem(new File(filename));
POIFSFileSystem poifs = new POIFSFileSystem(new File(filename));
SlideShowDumper foo = new SlideShowDumper(poifs, System.out);
poifs.close();

@@ -99,7 +99,7 @@ public final class SlideShowDumper {
* @param filesystem the POIFS FileSystem to read from
* @throws IOException if there is a problem while parsing the document.
*/
public SlideShowDumper(NPOIFSFileSystem filesystem, PrintStream out) throws IOException {
public SlideShowDumper(POIFSFileSystem filesystem, PrintStream out) throws IOException {
// Grab the document stream
InputStream is = filesystem.createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT);
docstream = IOUtils.toByteArray(is);

+ 0
- 10
src/scratchpad/src/org/apache/poi/hslf/extractor/PowerPointExtractor.java View File

@@ -30,7 +30,6 @@ import org.apache.poi.hslf.usermodel.HSLFSlideShowImpl;
import org.apache.poi.hslf.usermodel.HSLFTextParagraph;
import org.apache.poi.hssf.record.crypto.Biff8EncryptionKey;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.sl.usermodel.SlideShowFactory;
@@ -116,15 +115,6 @@ public final class PowerPointExtractor extends POIOLE2TextExtractor {
this((HSLFSlideShow)SlideShowFactory.create(fs, Biff8EncryptionKey.getCurrentUserPassword()));
}

/**
* Creates a PowerPointExtractor, from an open NPOIFSFileSystem
*
* @param fs the NPOIFSFileSystem containing the PowerPoint document
*/
public PowerPointExtractor(NPOIFSFileSystem fs) throws IOException {
this((HSLFSlideShow)SlideShowFactory.create(fs, Biff8EncryptionKey.getCurrentUserPassword()));
}

/**
* Creates a PowerPointExtractor, from a specific place
* inside an open NPOIFSFileSystem

+ 5
- 5
src/scratchpad/src/org/apache/poi/hslf/extractor/QuickButCruddyTextExtractor.java View File

@@ -30,7 +30,7 @@ import org.apache.poi.hslf.record.TextBytesAtom;
import org.apache.poi.hslf.record.TextCharsAtom;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;
import org.apache.poi.hslf.usermodel.HSLFTextParagraph;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;

@@ -53,7 +53,7 @@ import org.apache.poi.util.LittleEndian;
* lucene indexers) that would ever want to use this!
*/
public final class QuickButCruddyTextExtractor {
private NPOIFSFileSystem fs;
private POIFSFileSystem fs;
private InputStream is;
private byte[] pptContents;

@@ -82,7 +82,7 @@ public final class QuickButCruddyTextExtractor {
*/
@SuppressWarnings("resource")
public QuickButCruddyTextExtractor(String fileName) throws IOException {
this(new NPOIFSFileSystem(new File(fileName)));
this(new POIFSFileSystem(new File(fileName)));
}

/**
@@ -91,7 +91,7 @@ public final class QuickButCruddyTextExtractor {
*/
@SuppressWarnings("resource")
public QuickButCruddyTextExtractor(InputStream iStream) throws IOException {
this(new NPOIFSFileSystem(iStream));
this(new POIFSFileSystem(iStream));
is = iStream;
}

@@ -99,7 +99,7 @@ public final class QuickButCruddyTextExtractor {
* Creates an extractor from a POIFS Filesystem
* @param poifs
*/
public QuickButCruddyTextExtractor(NPOIFSFileSystem poifs) throws IOException {
public QuickButCruddyTextExtractor(POIFSFileSystem poifs) throws IOException {
fs = poifs;

// Find the PowerPoint bit, and get out the bytes

+ 8
- 8
src/scratchpad/src/org/apache/poi/hslf/record/CurrentUserAtom.java View File

@@ -30,7 +30,7 @@ import org.apache.poi.hslf.exceptions.CorruptPowerPointFileException;
import org.apache.poi.hslf.exceptions.OldPowerPointFormatException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.POILogFactory;
@@ -49,13 +49,13 @@ public class CurrentUserAtom
private static final int MAX_RECORD_LENGTH = 1_000_000;

/** Standard Atom header */
public static final byte[] atomHeader = new byte[] { 0, 0, -10, 15 };
private static final byte[] atomHeader = new byte[] { 0, 0, -10, 15 };
/** The PowerPoint magic number for a non-encrypted file */
public static final byte[] headerToken = new byte[] { 95, -64, -111, -29 };
/** The PowerPoint magic number for an encrypted file */
public static final byte[] encHeaderToken = new byte[] { -33, -60, -47, -13 };
/** The Powerpoint 97 version, major and minor numbers */
public static final byte[] ppt97FileVer = new byte[] { 8, 00, -13, 03, 03, 00 };
private static final byte[] headerToken = new byte[] { 95, -64, -111, -29 };
/** The PowerPoint magic number for an encrypted file */
private static final byte[] encHeaderToken = new byte[] { -33, -60, -47, -13 };
// The Powerpoint 97 version, major and minor numbers
// byte[] ppt97FileVer = new byte[] { 8, 00, -13, 03, 03, 00 };

/** The version, major and minor numbers */
private int docFinalVersion;
@@ -274,7 +274,7 @@ public class CurrentUserAtom
/**
* Writes ourselves back out to a filesystem
*/
public void writeToFS(NPOIFSFileSystem fs) throws IOException {
public void writeToFS(POIFSFileSystem fs) throws IOException {
// Grab contents
ByteArrayOutputStream baos = new ByteArrayOutputStream();
writeOut(baos);

+ 33  - 36  src/scratchpad/src/org/apache/poi/hslf/usermodel/HSLFSlideShow.java

@@ -46,9 +46,8 @@ import org.apache.poi.hslf.model.MovieShape;
import org.apache.poi.hslf.record.*;
import org.apache.poi.hslf.record.SlideListWithText.SlideAtomsSet;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.Ole10Native;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.filesystem.Ole10Native;
import org.apache.poi.sl.usermodel.MasterSheet;
import org.apache.poi.sl.usermodel.PictureData.PictureType;
import org.apache.poi.sl.usermodel.Resources;
@@ -149,7 +148,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
* Constructs a Powerpoint document from a POIFSFileSystem.
*/
@SuppressWarnings("resource")
public HSLFSlideShow(NPOIFSFileSystem npoifs) throws IOException {
public HSLFSlideShow(POIFSFileSystem npoifs) throws IOException {
this(new HSLFSlideShowImpl(npoifs));
}

@@ -164,7 +163,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
/**
* @return the current loading/saving phase
*/
protected static LoadSavePhase getLoadSavePhase() {
static LoadSavePhase getLoadSavePhase() {
return loadSavePhase.get();
}

@@ -185,9 +184,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
// PersistPtr, remove their old positions
int[] ids = pph.getKnownSlideIDs();
for (int id : ids) {
if (mostRecentByBytes.containsKey(id)) {
mostRecentByBytes.remove(id);
}
mostRecentByBytes.remove(id);
}

// Now, update the byte level locations with their latest values
@@ -205,7 +202,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
// We'll also want to be able to turn the slide IDs into a position
// in this array
_sheetIdToCoreRecordsLookup = new HashMap<>();
Integer[] allIDs = mostRecentByBytes.keySet().toArray(new Integer[mostRecentByBytes.size()]);
Integer[] allIDs = mostRecentByBytes.keySet().toArray(new Integer[0]);
Arrays.sort(allIDs);
for (int i = 0; i < allIDs.length; i++) {
_sheetIdToCoreRecordsLookup.put(allIDs[i], i);
@@ -534,6 +531,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
/**
* Returns the data of all the embedded OLE object in the SlideShow
*/
@SuppressWarnings("WeakerAccess")
public HSLFObjectData[] getEmbeddedObjects() {
return _hslfSlideShow.getEmbeddedObjects();
}
@@ -563,7 +561,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
/**
* Helper method for usermodel: Get the font collection
*/
protected FontCollection getFontCollection() {
FontCollection getFontCollection() {
return _fonts;
}

@@ -582,6 +580,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
* @param newSlideNumber
* The new slide number (1 based)
*/
@SuppressWarnings("WeakerAccess")
public void reorderSlide(int oldSlideNumber, int newSlideNumber) {
// Ensure these numbers are valid
if (oldSlideNumber < 1 || newSlideNumber < 1) {
@@ -612,7 +611,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
lst.addAll(Arrays.asList(s.getSlideRecords()));
}

Record[] r = lst.toArray(new Record[lst.size()]);
Record[] r = lst.toArray(new Record[0]);
slwt.setChildRecord(r);
}

@@ -627,6 +626,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
* the index of the slide to remove (0-based)
* @return the slide that was removed from the slide show.
*/
@SuppressWarnings("WeakerAccess")
public HSLFSlide removeSlide(int index) {
int lastSlideIdx = _slides.size() - 1;
if (index < 0 || index > lastSlideIdx) {
@@ -656,8 +656,8 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
if (sa.isEmpty()) {
_documentRecord.removeSlideListWithText(slwt);
} else {
slwt.setSlideAtomsSets(sa.toArray(new SlideAtomsSet[sa.size()]));
slwt.setChildRecord(records.toArray(new Record[records.size()]));
slwt.setSlideAtomsSets(sa.toArray(new SlideAtomsSet[0]));
slwt.setChildRecord(records.toArray(new Record[0]));
}

// if the removed slide had notes - remove references to them too
@@ -667,21 +667,23 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
SlideListWithText nslwt = _documentRecord.getNotesSlideListWithText();
records = new ArrayList<>();
ArrayList<SlideAtomsSet> na = new ArrayList<>();
for (SlideAtomsSet ns : nslwt.getSlideAtomsSets()) {
if (ns.getSlidePersistAtom().getSlideIdentifier() == notesId) {
continue;
}
na.add(ns);
records.add(ns.getSlidePersistAtom());
if (ns.getSlideRecords() != null) {
records.addAll(Arrays.asList(ns.getSlideRecords()));
if (nslwt != null) {
for (SlideAtomsSet ns : nslwt.getSlideAtomsSets()) {
if (ns.getSlidePersistAtom().getSlideIdentifier() == notesId) {
continue;
}
na.add(ns);
records.add(ns.getSlidePersistAtom());
if (ns.getSlideRecords() != null) {
records.addAll(Arrays.asList(ns.getSlideRecords()));
}
}
}
if (na.isEmpty()) {
_documentRecord.removeSlideListWithText(nslwt);
} else {
nslwt.setSlideAtomsSets(na.toArray(new SlideAtomsSet[na.size()]));
nslwt.setChildRecord(records.toArray(new Record[records.size()]));
nslwt.setSlideAtomsSets(na.toArray(new SlideAtomsSet[0]));
nslwt.setChildRecord(records.toArray(new Record[0]));
}
}

@@ -712,10 +714,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
SlidePersistAtom prev = null;
for (SlideAtomsSet sas : slist.getSlideAtomsSets()) {
SlidePersistAtom spa = sas.getSlidePersistAtom();
if (spa.getSlideIdentifier() < 0) {
// This is for a master slide
// Odd, since we only deal with the Slide SLWT
} else {
if (spa.getSlideIdentifier() >= 0) {
// Must be for a real slide
if (prev == null) {
prev = spa;
@@ -848,12 +847,9 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
throw new IllegalArgumentException("Unsupported picture format: " + format);
}
byte[] data = IOUtils.safelyAllocate(pict.length(), MAX_RECORD_LENGTH);
FileInputStream is = new FileInputStream(pict);
try {
try (FileInputStream is = new FileInputStream(pict)) {
IOUtils.readFully(is, data);
} finally {
is.close();
}
}
return addPicture(data, format);
}

@@ -969,6 +965,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
* "ShockwaveFlash.ShockwaveFlash.9"
* @return 0-based index of the control
*/
@SuppressWarnings("unused")
public int addControl(String name, String progId) {
ExControl ctrl = new ExControl();
ctrl.setProgId(progId);
@@ -1053,7 +1050,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
return new HPSFPropertiesExtractor(getSlideShowImpl());
}
protected int addToObjListAtom(RecordContainer exObj) {
int addToObjListAtom(RecordContainer exObj) {
ExObjList lst = getDocumentRecord().getExObjList(true);
ExObjListAtom objAtom = lst.getExObjListAtom();
// increment the object ID seed
@@ -1065,7 +1062,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
return objectId;
}

protected static Map<String,ClassID> getOleMap() {
private static Map<String,ClassID> getOleMap() {
Map<String,ClassID> olemap = new HashMap<>();
olemap.put(POWERPOINT_DOCUMENT, ClassIDPredefined.POWERPOINT_V8.getClassID());
// as per BIFF8 spec
@@ -1078,7 +1075,7 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap
return olemap;
}

protected int addPersistentObject(PositionDependentRecord slideRecord) {
private int addPersistentObject(PositionDependentRecord slideRecord) {
slideRecord.setLastOnDiskOffset(HSLFSlideShowImpl.UNSET_OFFSET);
_hslfSlideShow.appendRootLevelRecord((Record)slideRecord);

@@ -1117,13 +1114,13 @@ public final class HSLFSlideShow implements SlideShow<HSLFShape,HSLFTextParagrap

@Override
public MasterSheet<HSLFShape,HSLFTextParagraph> createMasterSheet() throws IOException {
// TODO Auto-generated method stub
// TODO implement or throw exception if not supported
return null;
}

@Override
public Resources getResources() {
// TODO Auto-generated method stub
// TODO implement or throw exception if not supported
return null;
}


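To see the usermodel methods touched above (removeSlide, write) together with the renamed filesystem, a small illustrative sketch outside the patch; paths are placeholders:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.poi.hslf.usermodel.HSLFSlideShow;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class DropFirstSlide {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("deck.ppt"), true);
                 HSLFSlideShow ppt = new HSLFSlideShow(fs);
                 FileOutputStream out = new FileOutputStream("deck-trimmed.ppt")) {
                ppt.removeSlide(0);   // 0-based index, per the javadoc above
                ppt.write(out);       // serialised through a fresh POIFSFileSystem
            }
        }
    }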
+ 3  - 2  src/scratchpad/src/org/apache/poi/hslf/usermodel/HSLFSlideShowFactory.java

@@ -20,7 +20,7 @@ package org.apache.poi.hslf.usermodel;
import java.io.IOException;

import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.usermodel.SlideShowFactory;
import org.apache.poi.util.Internal;

@@ -28,6 +28,7 @@ import org.apache.poi.util.Internal;
* Helper class which is instantiated by reflection from
* {@link SlideShowFactory#create(java.io.File)} and similar
*/
@SuppressWarnings("unused")
@Internal
public class HSLFSlideShowFactory extends SlideShowFactory {
/**
@@ -35,7 +36,7 @@ public class HSLFSlideShowFactory extends SlideShowFactory {
* Note that in order to properly release resources the
* SlideShow should be closed after use.
*/
public static HSLFSlideShow createSlideShow(final NPOIFSFileSystem fs) throws IOException {
public static HSLFSlideShow createSlideShow(final POIFSFileSystem fs) throws IOException {
return new HSLFSlideShow(fs);
}


+ 23  - 46  src/scratchpad/src/org/apache/poi/hslf/usermodel/HSLFSlideShowImpl.java

@@ -49,7 +49,6 @@ import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.usermodel.PictureData.PictureType;
import org.apache.poi.util.IOUtils;
@@ -63,7 +62,7 @@ import org.apache.poi.util.POILogger;
* "reader". It is only a very basic class for now
*/
public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
public static final int UNSET_OFFSET = -1;
static final int UNSET_OFFSET = -1;

//arbitrarily selected; may need to increase
private static final int MAX_RECORD_LENGTH = 200_000_000;
@@ -122,17 +121,6 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
this(filesystem.getRoot());
}

/**
* Constructs a Powerpoint document from a POIFS Filesystem. Parses the
* document and places all the important stuff into data structures.
*
* @param filesystem the POIFS FileSystem to read from
* @throws IOException if there is a problem while parsing the document.
*/
public HSLFSlideShowImpl(NPOIFSFileSystem filesystem) throws IOException {
this(filesystem.getRoot());
}

/**
* Constructs a Powerpoint document from a specific point in a
* POIFS Filesystem. Parses the document and places all the
@@ -192,7 +180,7 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
* Extracts the main PowerPoint document stream from the
* POI file, ready to be passed
*
* @throws IOException
* @throws IOException when the powerpoint can't be read
*/
private void readPowerPointStream() throws IOException {
// Get the main document stream
@@ -201,11 +189,8 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {

// Grab the document stream
int len = docProps.getSize();
InputStream is = getDirectory().createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT);
try {
try (InputStream is = getDirectory().createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT)) {
_docstream = IOUtils.toByteArray(is, len);
} finally {
is.close();
}
}

@@ -276,7 +261,7 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
}

decryptData.close();
return records.values().toArray(new Record[records.size()]);
return records.values().toArray(new Record[0]);
}

private void initRecordOffsets(byte[] docstream, int usrOffset, NavigableMap<Integer, Record> recordMap, Map<Integer, Integer> offset2id) {
@@ -362,16 +347,15 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
byte[] pictstream = IOUtils.toByteArray(is, entry.getSize());
is.close();

HSLFSlideShowEncrypted decryptData = new HSLFSlideShowEncrypted(getDocumentEncryptionAtom());
try {
try (HSLFSlideShowEncrypted decryptData = new HSLFSlideShowEncrypted(getDocumentEncryptionAtom())) {

int pos = 0;
// An empty picture record (length 0) will take up 8 bytes
while (pos <= (pictstream.length - 8)) {
int offset = pos;
decryptData.decryptPicture(pictstream, offset);
// Image signature
int signature = LittleEndian.getUShort(pictstream, pos);
pos += LittleEndianConsts.SHORT_SIZE;
@@ -381,20 +365,20 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
// Image size (excluding the 8 byte header)
int imgsize = LittleEndian.getInt(pictstream, pos);
pos += LittleEndianConsts.INT_SIZE;
// When parsing the BStoreDelay stream, [MS-ODRAW] says that we
// should terminate if the type isn't 0xf007 or 0xf018->0xf117
if (!((type == 0xf007) || (type >= 0xf018 && type <= 0xf117))) {
break;
}
// The image size must be 0 or greater
// (0 is allowed, but odd, since we do wind on by the header each
// time, so we won't get stuck)
if (imgsize < 0) {
throw new CorruptPowerPointFileException("The file contains a picture, at position " + _pictures.size() + ", which has a negatively sized data length, so we can't trust any of the picture data");
}
// If they type (including the bonus 0xF018) is 0, skip it
PictureType pt = PictureType.forNativeID(type - 0xF018);
if (pt == null) {
@@ -404,7 +388,7 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
//The pictstream can be truncated halfway through a picture.
//This is not a problem if the pictstream contains extra pictures
//that are not used in any slide -- BUG-60305
if (pos+imgsize > pictstream.length) {
if (pos + imgsize > pictstream.length) {
logger.log(POILogger.WARN, "\"Pictures\" stream may have ended early. In some circumstances, this is not a problem; " +
"in others, this could indicate a corrupt file");
break;
@@ -413,12 +397,12 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
try {
HSLFPictureData pict = HSLFPictureData.create(pt);
pict.setSignature(signature);
// Copy the data, ready to pass to PictureData
byte[] imgdata = IOUtils.safelyAllocate(imgsize, MAX_RECORD_LENGTH);
System.arraycopy(pictstream, pos, imgdata, 0, imgdata.length);
pict.setRawData(imgdata);
pict.setOffset(offset);
pict.setIndex(_pictures.size());
_pictures.add(pict);
@@ -426,11 +410,9 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
logger.log(POILogger.ERROR, "Problem reading picture: " + e + "\nYour document will probably become corrupted if you save it!");
}
}
pos += imgsize;
}
} finally {
decryptData.close();
}
}

@@ -456,8 +438,8 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
* @param interestingRecords a map of interesting records (PersistPtrHolder and UserEditAtom)
* referenced by their RecordType. Only the very last of each type will be saved to the map.
* May be null, if not needed.
* @throws IOException
*/
@SuppressWarnings("WeakerAccess")
public void updateAndWriteDependantRecords(OutputStream os, Map<RecordTypes, PositionDependentRecord> interestingRecords)
throws IOException {
// For position dependent records, hold where they were and now are
@@ -602,16 +584,13 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
*/
public void write(File newFile, boolean preserveNodes) throws IOException {
// Get a new FileSystem to write into
POIFSFileSystem outFS = POIFSFileSystem.create(newFile);

try {
try (POIFSFileSystem outFS = POIFSFileSystem.create(newFile)) {
// Write into the new FileSystem
write(outFS, preserveNodes);

// Send the POIFSFileSystem object out to the underlying stream
outFS.writeFilesystem();
} finally {
outFS.close();
}
}

@@ -645,20 +624,17 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
*/
public void write(OutputStream out, boolean preserveNodes) throws IOException {
// Get a new FileSystem to write into
POIFSFileSystem outFS = new POIFSFileSystem();

try {
try (POIFSFileSystem outFS = new POIFSFileSystem()) {
// Write into the new FileSystem
write(outFS, preserveNodes);

// Send the POIFSFileSystem object out to the underlying stream
outFS.writeFilesystem(out);
} finally {
outFS.close();
}
}

private void write(NPOIFSFileSystem outFS, boolean copyAllOtherNodes) throws IOException {
private void write(POIFSFileSystem outFS, boolean copyAllOtherNodes) throws IOException {
// read properties and pictures, with old encryption settings where appropriate
if (_pictures == null) {
readPictures();
@@ -721,7 +697,7 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {


@Override
public EncryptionInfo getEncryptionInfo() throws IOException {
public EncryptionInfo getEncryptionInfo() {
DocumentEncryptionAtom dea = getDocumentEncryptionAtom();
return (dea != null) ? dea.getEncryptionInfo() : null;
}
@@ -735,6 +711,7 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
* Adds a new root level record, at the end, but before the last
* PersistPtrIncrementalBlock.
*/
@SuppressWarnings({"UnusedReturnValue", "WeakerAccess"})
public synchronized int appendRootLevelRecord(Record newRecord) {
int addedAt = -1;
Record[] r = new Record[_records.length + 1];
@@ -839,7 +816,7 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
objects.add(new HSLFObjectData((ExOleObjStg) r));
}
}
_objects = objects.toArray(new HSLFObjectData[objects.size()]);
_objects = objects.toArray(new HSLFObjectData[0]);
}
return _objects;
}
@@ -849,7 +826,7 @@ public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
// only close the filesystem, if we are based on the root node.
// embedded documents/slideshows shouldn't close the parent container
if (getDirectory().getParent() == null) {
NPOIFSFileSystem fs = getDirectory().getFileSystem();
POIFSFileSystem fs = getDirectory().getFileSystem();
if (fs != null) {
fs.close();
}

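The same try-with-resources style the patch introduces, applied from calling code — an illustrative sketch, not part of the diff; file names are placeholders:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.poi.hslf.usermodel.HSLFSlideShowImpl;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class RewritePpt {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("in.ppt"), true);
                 HSLFSlideShowImpl hss = new HSLFSlideShowImpl(fs);
                 FileOutputStream out = new FileOutputStream("out.ppt")) {
                // preserveNodes = true also copies the OLE2 streams other than the
                // main PowerPoint document stream across to the new file
                hss.write(out, true);
            }
        }
    }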
+ 5  - 5  src/scratchpad/src/org/apache/poi/hsmf/MAPIMessage.java

@@ -48,7 +48,7 @@ import org.apache.poi.hsmf.datatypes.Types;
import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
import org.apache.poi.hsmf.parsers.POIFSChunkParser;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.CodePageUtil;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
@@ -92,7 +92,7 @@ public class MAPIMessage extends POIReadOnlyDocument {
*/
public MAPIMessage() {
// TODO - make writing possible
super(new NPOIFSFileSystem());
super(new POIFSFileSystem());
}


@@ -112,7 +112,7 @@ public class MAPIMessage extends POIReadOnlyDocument {
* @exception IOException on errors reading, or invalid data
*/
public MAPIMessage(File file) throws IOException {
this(new NPOIFSFileSystem(file));
this(new POIFSFileSystem(file));
}

/**
@@ -125,7 +125,7 @@ public class MAPIMessage extends POIReadOnlyDocument {
* @exception IOException on errors reading, or invalid data
*/
public MAPIMessage(InputStream in) throws IOException {
this(new NPOIFSFileSystem(in));
this(new POIFSFileSystem(in));
}
/**
* Constructor for reading MSG Files from a POIFS filesystem
@@ -133,7 +133,7 @@ public class MAPIMessage extends POIReadOnlyDocument {
* @param fs Open POIFS FileSystem containing the message
* @exception IOException on errors reading, or invalid data
*/
public MAPIMessage(NPOIFSFileSystem fs) throws IOException {
public MAPIMessage(POIFSFileSystem fs) throws IOException {
this(fs.getRoot());
}
/**

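For the renamed MAPIMessage constructors, a minimal illustrative sketch (not part of the diff; the .msg path is a placeholder):

    import java.io.File;
    import java.io.IOException;

    import org.apache.poi.hsmf.MAPIMessage;
    import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class MsgSubject {
        public static void main(String[] args) throws IOException, ChunkNotFoundException {
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("quick.msg"), true)) {
                MAPIMessage msg = new MAPIMessage(fs);
                System.out.println(msg.getSubject());
            }
        }
    }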
+ 4  - 4  src/scratchpad/src/org/apache/poi/hsmf/dev/HSMFDump.java

@@ -27,14 +27,14 @@ import org.apache.poi.hsmf.datatypes.MAPIProperty;
import org.apache.poi.hsmf.datatypes.PropertiesChunk;
import org.apache.poi.hsmf.datatypes.PropertyValue;
import org.apache.poi.hsmf.parsers.POIFSChunkParser;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

/**
* Dumps out the chunk details, and where possible contents
*/
public class HSMFDump {
private NPOIFSFileSystem fs;
public HSMFDump(NPOIFSFileSystem fs) {
private POIFSFileSystem fs;
public HSMFDump(POIFSFileSystem fs) {
this.fs = fs;
}
@@ -84,7 +84,7 @@ public class HSMFDump {
public static void main(String[] args) throws Exception {
for(String file : args) {
NPOIFSFileSystem fs = new NPOIFSFileSystem(new File(file), true);
POIFSFileSystem fs = new POIFSFileSystem(new File(file), true);
HSMFDump dump = new HSMFDump(fs);
dump.dump();
fs.close();

+ 12  - 22  src/scratchpad/src/org/apache/poi/hsmf/extractor/OutlookTextExtactor.java

@@ -30,7 +30,7 @@ import org.apache.poi.hsmf.datatypes.AttachmentChunks;
import org.apache.poi.hsmf.datatypes.StringChunk;
import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.apache.poi.util.StringUtil.StringsIterator;

@@ -45,7 +45,7 @@ public class OutlookTextExtactor extends POIOLE2TextExtractor {
public OutlookTextExtactor(DirectoryNode poifsDir) throws IOException {
this(new MAPIMessage(poifsDir));
}
public OutlookTextExtactor(NPOIFSFileSystem fs) throws IOException {
public OutlookTextExtactor(POIFSFileSystem fs) throws IOException {
this(new MAPIMessage(fs));
}
public OutlookTextExtactor(InputStream inp) throws IOException {
@@ -54,15 +54,9 @@ public class OutlookTextExtactor extends POIOLE2TextExtractor {
public static void main(String[] args) throws Exception {
for(String filename : args) {
NPOIFSFileSystem poifs = null;
OutlookTextExtactor extractor = null;
try {
poifs = new NPOIFSFileSystem(new File(filename));
extractor = new OutlookTextExtactor(poifs);
System.out.println( extractor.getText() );
} finally {
if (extractor != null) extractor.close();
if (poifs != null) poifs.close();
try (POIFSFileSystem poifs = new POIFSFileSystem(new File(filename));
OutlookTextExtactor extractor = new OutlookTextExtactor(poifs)) {
System.out.println(extractor.getText());
}
}
}
@@ -118,18 +112,14 @@ public class OutlookTextExtactor extends POIOLE2TextExtractor {
// First try via the proper chunk
SimpleDateFormat f = new SimpleDateFormat("E, d MMM yyyy HH:mm:ss Z", Locale.ROOT);
f.setTimeZone(LocaleUtil.getUserTimeZone());
s.append("Date: " + f.format(msg.getMessageDate().getTime()) + "\n");
s.append("Date: ").append(f.format(msg.getMessageDate().getTime())).append("\n");
} catch(ChunkNotFoundException e) {
try {
// Failing that try via the raw headers
String[] headers = msg.getHeaders();
for(String header: headers) {
if(startsWithIgnoreCase(header, "date:")) {
s.append(
"Date:" +
header.substring(header.indexOf(':')+1) +
"\n"
);
s.append("Date:").append(header, header.indexOf(':')+1, header.length()).append("\n");
break;
}
}
@@ -139,7 +129,7 @@ public class OutlookTextExtactor extends POIOLE2TextExtractor {
}
try {
s.append("Subject: " + msg.getSubject() + "\n");
s.append("Subject: ").append(msg.getSubject()).append("\n");
} catch(ChunkNotFoundException e) {}
// Display attachment names
@@ -153,11 +143,11 @@ public class OutlookTextExtactor extends POIOLE2TextExtractor {
att.getAttachMimeTag().getValue() != null) {
attName = att.getAttachMimeTag().getValue() + " = " + attName;
}
s.append("Attachment: " + attName + "\n");
s.append("Attachment: ").append(attName).append("\n");
}
try {
s.append("\n" + msg.getTextBody() + "\n");
s.append("\n").append(msg.getTextBody()).append("\n");
} catch(ChunkNotFoundException e) {}
return s.toString();
@@ -176,7 +166,7 @@ public class OutlookTextExtactor extends POIOLE2TextExtractor {
String[] names = displayText.split(";\\s*");
boolean first = true;
s.append(type + ": ");
s.append(type).append(": ");
for(String name : names) {
if(first) {
first = false;
@@ -190,7 +180,7 @@ public class OutlookTextExtactor extends POIOLE2TextExtractor {
// Append the email address in <>, assuming
// the name wasn't already the email address
if(! email.equals(name)) {
s.append( " <" + email + ">");
s.append(" <").append(email).append(">");
}
}
}

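Alongside the rewritten main() above, the stream-based constructor can be driven the same way — an illustrative sketch, not part of the diff; the file name is a placeholder:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.poi.hsmf.extractor.OutlookTextExtactor;

    public class MsgText {
        public static void main(String[] args) throws IOException {
            // The File/POIFSFileSystem constructors above avoid buffering the whole
            // .msg in memory; the InputStream route remains for convenience
            try (InputStream in = new FileInputStream("simple_test_msg.msg");
                 OutlookTextExtactor extractor = new OutlookTextExtactor(in)) {
                System.out.println(extractor.getText());
            }
        }
    }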
+ 5  - 9  src/scratchpad/src/org/apache/poi/hsmf/parsers/POIFSChunkParser.java

@@ -40,7 +40,7 @@ import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.DocumentInputStream;
import org.apache.poi.poifs.filesystem.DocumentNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;

@@ -52,7 +52,7 @@ import org.apache.poi.util.POILogger;
public final class POIFSChunkParser {
private final static POILogger logger = POILogFactory.getLogger(POIFSChunkParser.class);

public static ChunkGroup[] parse(NPOIFSFileSystem fs) throws IOException {
public static ChunkGroup[] parse(POIFSFileSystem fs) throws IOException {
return parse(fs.getRoot());
}
public static ChunkGroup[] parse(DirectoryNode node) throws IOException {
@@ -206,15 +206,11 @@ public final class POIFSChunkParser {
if(chunk != null) {
if(entry instanceof DocumentNode) {
DocumentInputStream inp = null;
try {
inp = new DocumentInputStream((DocumentNode)entry);
try (DocumentInputStream inp = new DocumentInputStream((DocumentNode) entry)) {
chunk.readValue(inp);
grouping.record(chunk);
} catch(IOException e) {
logger.log(POILogger.ERROR, "Error reading from part " + entry.getName() + " - " + e);
} finally {
if (inp != null) inp.close();
} catch (IOException e) {
logger.log(POILogger.ERROR, "Error reading from part " + entry.getName() + " - " + e);
}
} else {
grouping.record(chunk);

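A sketch of calling the parser against a filesystem, matching the parse(POIFSFileSystem) signature above — illustrative only; the file name is a placeholder and getChunks()/getEntryName() are assumed from the existing hsmf datatypes API:

    import java.io.File;
    import java.io.IOException;

    import org.apache.poi.hsmf.datatypes.Chunk;
    import org.apache.poi.hsmf.datatypes.ChunkGroup;
    import org.apache.poi.hsmf.parsers.POIFSChunkParser;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class ListChunks {
        public static void main(String[] args) throws IOException {
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("quick.msg"), true)) {
                // Each ChunkGroup corresponds to one directory of the .msg (main, recipients, attachments, ...)
                for (ChunkGroup group : POIFSChunkParser.parse(fs)) {
                    for (Chunk chunk : group.getChunks()) {
                        System.out.println(chunk.getEntryName());
                    }
                }
            }
        }
    }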
+ 23  - 30  src/scratchpad/src/org/apache/poi/hwpf/HWPFDocument.java

@@ -70,7 +70,6 @@ import org.apache.poi.poifs.crypt.standard.EncryptionRecord;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.EntryUtils;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.Internal;
@@ -90,26 +89,26 @@ public final class HWPFDocument extends HWPFDocumentCore {
private static final String STREAM_DATA = "Data";

/** table stream buffer*/
protected byte[] _tableStream;
private byte[] _tableStream;

/** data stream buffer*/
protected byte[] _dataStream;
private byte[] _dataStream;

/** Document wide Properties*/
protected DocumentProperties _dop;
private DocumentProperties _dop;

/** Contains text of the document wrapped in a obfuscated Word data
* structure*/
protected ComplexFileTable _cft;
private ComplexFileTable _cft;

/** Contains text buffer linked directly to single-piece document text piece */
protected StringBuilder _text;
private StringBuilder _text;

/** Holds the save history for this document. */
protected SavedByTable _sbt;
private SavedByTable _sbt;

/** Holds the revision mark authors for this document. */
protected RevisionMarkAuthorTable _rmat;
private RevisionMarkAuthorTable _rmat;

/** Holds FSBA (shape) information */
private FSPATable _fspaHeaders;
@@ -118,46 +117,40 @@ public final class HWPFDocument extends HWPFDocumentCore {
private FSPATable _fspaMain;

/** Escher Drawing Group information */
protected EscherRecordHolder _escherRecordHolder;
private EscherRecordHolder _escherRecordHolder;

/** Holds pictures table */
protected PicturesTable _pictures;
private PicturesTable _pictures;

/** Holds Office Art objects */
protected OfficeDrawingsImpl _officeDrawingsHeaders;
private OfficeDrawingsImpl _officeDrawingsHeaders;

/** Holds Office Art objects */
protected OfficeDrawingsImpl _officeDrawingsMain;
private OfficeDrawingsImpl _officeDrawingsMain;

/** Holds the bookmarks tables */
protected BookmarksTables _bookmarksTables;
private BookmarksTables _bookmarksTables;

/** Holds the bookmarks */
protected Bookmarks _bookmarks;
private Bookmarks _bookmarks;

/** Holds the ending notes tables */
protected NotesTables _endnotesTables = new NotesTables( NoteType.ENDNOTE );
private NotesTables _endnotesTables = new NotesTables( NoteType.ENDNOTE );

/** Holds the footnotes */
protected Notes _endnotes = new NotesImpl( _endnotesTables );
private Notes _endnotes = new NotesImpl( _endnotesTables );

/** Holds the footnotes tables */
protected NotesTables _footnotesTables = new NotesTables( NoteType.FOOTNOTE );
private NotesTables _footnotesTables = new NotesTables( NoteType.FOOTNOTE );

/** Holds the footnotes */
protected Notes _footnotes = new NotesImpl( _footnotesTables );
private Notes _footnotes = new NotesImpl( _footnotesTables );

/** Holds the fields PLCFs */
protected FieldsTables _fieldsTables;
private FieldsTables _fieldsTables;

/** Holds the fields */
protected Fields _fields;

protected HWPFDocument()
{
super();
this._text = new StringBuilder("\r");
}
private Fields _fields;

/**
* This constructor loads a Word document from an InputStream.
@@ -599,7 +592,7 @@ public final class HWPFDocument extends HWPFDocumentCore {
*/
@Override
public void write(File newFile) throws IOException {
NPOIFSFileSystem pfs = POIFSFileSystem.create(newFile);
POIFSFileSystem pfs = POIFSFileSystem.create(newFile);
write(pfs, true);
pfs.writeFilesystem();
}
@@ -618,12 +611,12 @@ public final class HWPFDocument extends HWPFDocumentCore {
*/
@Override
public void write(OutputStream out) throws IOException {
NPOIFSFileSystem pfs = new NPOIFSFileSystem();
POIFSFileSystem pfs = new POIFSFileSystem();
write(pfs, true);
pfs.writeFilesystem( out );
}

private void write(NPOIFSFileSystem pfs, boolean copyOtherEntries) throws IOException {
private void write(POIFSFileSystem pfs, boolean copyOtherEntries) throws IOException {
// clear the offsets and sizes in our FileInformationBlock.
_fib.clearOffsetsSizes();

@@ -999,7 +992,7 @@ public final class HWPFDocument extends HWPFDocumentCore {
return bos.toByteArray();
}
private static void write(NPOIFSFileSystem pfs, byte[] data, String name) throws IOException {
private static void write(POIFSFileSystem pfs, byte[] data, String name) throws IOException {
pfs.createOrUpdateDocument(new ByteArrayInputStream(data), name);
}


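The File-based write path shown above (backed by POIFSFileSystem.create) from calling code — an illustrative sketch, not part of the diff; paths are placeholders:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;

    import org.apache.poi.hwpf.HWPFDocument;

    public class ResaveDoc {
        public static void main(String[] args) throws IOException {
            try (FileInputStream in = new FileInputStream("in.doc");
                 HWPFDocument doc = new HWPFDocument(in)) {
                // write(File) goes through POIFSFileSystem.create(newFile), as in the hunk above
                doc.write(new File("out.doc"));
            }
        }
    }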
+ 2  - 3  src/scratchpad/testcases/org/apache/poi/TestPOIDocumentScratchpad.java

@@ -33,7 +33,6 @@ import org.apache.poi.hpsf.HPSFPropertiesOnlyDocument;
import org.apache.poi.hpsf.SummaryInformation;
import org.apache.poi.hslf.usermodel.HSLFSlideShowImpl;
import org.apache.poi.hwpf.HWPFTestDataSamples;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Before;
import org.junit.Test;
@@ -89,7 +88,7 @@ public final class TestPOIDocumentScratchpad {
@Test
public void testWriteProperties() throws IOException {
// Just check we can write them back out into a filesystem
NPOIFSFileSystem outFS = new NPOIFSFileSystem();
POIFSFileSystem outFS = new POIFSFileSystem();
doc.writeProperties(outFS);

// Should now hold them
@@ -103,7 +102,7 @@ public final class TestPOIDocumentScratchpad {
ByteArrayOutputStream baos = new ByteArrayOutputStream();

// Write them out
NPOIFSFileSystem outFS = new NPOIFSFileSystem();
POIFSFileSystem outFS = new POIFSFileSystem();
doc.writeProperties(outFS);
outFS.writeFilesystem(baos);


+ 2  - 4  src/scratchpad/testcases/org/apache/poi/hpbf/extractor/TestPublisherTextExtractor.java

@@ -26,7 +26,7 @@ import java.io.InputStream;

import org.apache.poi.POIDataSamples;
import org.apache.poi.hpbf.HPBFDocument;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Test;

public final class TestPublisherTextExtractor {
@@ -97,7 +97,7 @@ public final class TestPublisherTextExtractor {

// And with NPOIFS
sample = _samples.openResourceAsStream("Sample.pub");
NPOIFSFileSystem fs = new NPOIFSFileSystem(sample);
POIFSFileSystem fs = new POIFSFileSystem(sample);
HPBFDocument docNPOIFS = new HPBFDocument(fs);
ext = new PublisherTextExtractor(docNPOIFS);
assertEquals(SAMPLE_TEXT, ext.getText());
@@ -116,8 +116,6 @@ public final class TestPublisherTextExtractor {
/**
* We have the same file saved for Publisher 98, Publisher 2000 and
* Publisher 2007. Check they all agree.
*
* @throws Exception
*/
@Test
public void testMultipleVersions() throws Exception {

+ 2  - 3  src/scratchpad/testcases/org/apache/poi/hslf/extractor/TestExtractor.java

@@ -37,7 +37,6 @@ import org.apache.poi.hslf.usermodel.HSLFSlideShow;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.sl.usermodel.ObjectShape;
@@ -376,14 +375,14 @@ public final class TestExtractor {

/**
* Tests that we can work with both {@link POIFSFileSystem}
* and {@link NPOIFSFileSystem}
* and {@link POIFSFileSystem}
*/
@SuppressWarnings("resource")
@Test
public void testDifferentPOIFS() throws IOException {
// Open the two filesystems
File pptFile = slTests.getFile("basic_test_ppt_file.ppt");
try (final NPOIFSFileSystem npoifs = new NPOIFSFileSystem(pptFile, true)) {
try (final POIFSFileSystem npoifs = new POIFSFileSystem(pptFile, true)) {
// Open directly
try (SlideShow<?,?> ppt = SlideShowFactory.create(npoifs.getRoot());
SlideShowExtractor<?,?> extractor = new SlideShowExtractor<>(ppt)) {

+ 7  - 10  src/scratchpad/testcases/org/apache/poi/hslf/record/TestDocumentEncryption.java

@@ -47,17 +47,14 @@ import org.apache.poi.poifs.crypt.EncryptionInfo;
import org.apache.poi.poifs.crypt.HashAlgorithm;
import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIDecryptor;
import org.apache.poi.poifs.crypt.cryptoapi.CryptoAPIEncryptionHeader;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
* Tests that DocumentEncryption works properly.
*/
public class TestDocumentEncryption {
POIDataSamples slTests = POIDataSamples.getSlideShowInstance();
private static final POIDataSamples slTests = POIDataSamples.getSlideShowInstance();

@Test
public void cryptoAPIDecryptionOther() throws Exception {
@@ -70,7 +67,7 @@ public class TestDocumentEncryption {
Biff8EncryptionKey.setCurrentUserPassword("hello");
try {
for (String pptFile : encPpts) {
try (NPOIFSFileSystem fs = new NPOIFSFileSystem(slTests.getFile(pptFile), true);
try (POIFSFileSystem fs = new POIFSFileSystem(slTests.getFile(pptFile), true);
HSLFSlideShow ppt = new HSLFSlideShow(fs)) {
assertTrue(ppt.getSlides().size() > 0);
} catch (EncryptedPowerPointFileException e) {
@@ -86,7 +83,7 @@ public class TestDocumentEncryption {
public void cryptoAPIChangeKeySize() throws Exception {
String pptFile = "cryptoapi-proc2356.ppt";
Biff8EncryptionKey.setCurrentUserPassword("crypto");
try (NPOIFSFileSystem fs = new NPOIFSFileSystem(slTests.getFile(pptFile), true);
try (POIFSFileSystem fs = new POIFSFileSystem(slTests.getFile(pptFile), true);
HSLFSlideShowImpl hss = new HSLFSlideShowImpl(fs)) {
// need to cache data (i.e. read all data) before changing the key size
List<HSLFPictureData> picsExpected = hss.getPictureData();
@@ -99,7 +96,7 @@ public class TestDocumentEncryption {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
hss.write(bos);

try (NPOIFSFileSystem fs2 = new NPOIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()));
try (POIFSFileSystem fs2 = new POIFSFileSystem(new ByteArrayInputStream(bos.toByteArray()));
HSLFSlideShowImpl hss2 = new HSLFSlideShowImpl(fs2)) {
List<HSLFPictureData> picsActual = hss2.getPictureData();

@@ -121,7 +118,7 @@ public class TestDocumentEncryption {
ByteArrayOutputStream expected = new ByteArrayOutputStream();
ByteArrayOutputStream actual = new ByteArrayOutputStream();
try {
try (NPOIFSFileSystem fs = new NPOIFSFileSystem(slTests.getFile(pptFile), true);
try (POIFSFileSystem fs = new POIFSFileSystem(slTests.getFile(pptFile), true);
HSLFSlideShowImpl hss = new HSLFSlideShowImpl(fs)) {
hss.normalizeRecords();

@@ -135,7 +132,7 @@ public class TestDocumentEncryption {

// decrypted
ByteArrayInputStream bis = new ByteArrayInputStream(encrypted.toByteArray());
try (NPOIFSFileSystem fs = new NPOIFSFileSystem(bis);
try (POIFSFileSystem fs = new POIFSFileSystem(bis);
HSLFSlideShowImpl hss = new HSLFSlideShowImpl(fs)) {
Biff8EncryptionKey.setCurrentUserPassword(null);
hss.write(actual);
@@ -152,7 +149,7 @@ public class TestDocumentEncryption {
// taken from a msdn blog:
// http://blogs.msdn.com/b/openspecification/archive/2009/05/08/dominic-salemno.aspx
Biff8EncryptionKey.setCurrentUserPassword("crypto");
try (NPOIFSFileSystem fs = new NPOIFSFileSystem(slTests.getFile("cryptoapi-proc2356.ppt"));
try (POIFSFileSystem fs = new POIFSFileSystem(slTests.getFile("cryptoapi-proc2356.ppt"));
HSLFSlideShow ss = new HSLFSlideShow(fs)) {

HSLFSlide slide = ss.getSlides().get(0);

+ 15  - 20  src/scratchpad/testcases/org/apache/poi/hslf/usermodel/TestBugs.java

@@ -56,7 +56,6 @@ import org.apache.poi.ddf.EscherColorRef;
import org.apache.poi.ddf.EscherProperties;
import org.apache.poi.hslf.HSLFTestDataSamples;
import org.apache.poi.hslf.exceptions.OldPowerPointFormatException;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.hslf.model.HeadersFooters;
import org.apache.poi.hslf.record.DocInfoListContainer;
import org.apache.poi.hslf.record.Document;
@@ -68,7 +67,7 @@ import org.apache.poi.hslf.record.TextHeaderAtom;
import org.apache.poi.hslf.record.VBAInfoAtom;
import org.apache.poi.hslf.record.VBAInfoContainer;
import org.apache.poi.hssf.usermodel.DummyGraphics2d;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.macros.VBAMacroReader;
import org.apache.poi.sl.draw.DrawFactory;
import org.apache.poi.sl.draw.DrawPaint;
@@ -131,14 +130,14 @@ public final class TestBugs {
assertNotNull(notes);
txrun = notes.getTextParagraphs().get(0);
assertEquals("Notes-1", HSLFTextParagraph.getRawText(txrun));
assertEquals(false, txrun.get(0).getTextRuns().get(0).isBold());
assertFalse(txrun.get(0).getTextRuns().get(0).isBold());

//notes for the second slide are in bold
notes = ppt.getSlides().get(1).getNotes();
assertNotNull(notes);
txrun = notes.getTextParagraphs().get(0);
assertEquals("Notes-2", HSLFTextParagraph.getRawText(txrun));
assertEquals(true, txrun.get(0).getTextRuns().get(0).isBold());
assertTrue(txrun.get(0).getTextRuns().get(0).isBold());

ppt.close();
}
@@ -152,14 +151,14 @@ public final class TestBugs {

//map slide number and starting phrase of its notes
Map<Integer, String> notesMap = new HashMap<>();
notesMap.put(Integer.valueOf(4), "For decades before calculators");
notesMap.put(Integer.valueOf(5), "Several commercial applications");
notesMap.put(Integer.valueOf(6), "There are three variations of LNS that are discussed here");
notesMap.put(Integer.valueOf(7), "Although multiply and square root are easier");
notesMap.put(Integer.valueOf(8), "The bus Z is split into Z_H and Z_L");
notesMap.put(4, "For decades before calculators");
notesMap.put(5, "Several commercial applications");
notesMap.put(6, "There are three variations of LNS that are discussed here");
notesMap.put(7, "Although multiply and square root are easier");
notesMap.put(8, "The bus Z is split into Z_H and Z_L");

for (HSLFSlide slide : ppt.getSlides()) {
Integer slideNumber = Integer.valueOf(slide.getSlideNumber());
Integer slideNumber = slide.getSlideNumber();
HSLFNotes notes = slide.getNotes();
if (notesMap.containsKey(slideNumber)){
assertNotNull(notes);
@@ -412,7 +411,6 @@ public final class TestBugs {

/**
* PowerPoint 95 files should throw a more helpful exception
* @throws IOException
*/
@Test(expected=OldPowerPointFormatException.class)
public void bug41711() throws IOException {
@@ -632,7 +630,7 @@ public final class TestBugs {

@Test
public void bug45124() throws IOException {
SlideShow<?,?> ppt = open("bug45124.ppt");
HSLFSlideShow ppt = open("bug45124.ppt");
Slide<?,?> slide1 = ppt.getSlides().get(1);

TextBox<?,?> res = slide1.createTextBox();
@@ -647,7 +645,7 @@ public final class TestBugs {

tp.setBulletStyle(Color.red, 'A');

SlideShow<?,?> ppt2 = HSLFTestDataSamples.writeOutAndReadBack((HSLFSlideShow)ppt);
SlideShow<?,?> ppt2 = HSLFTestDataSamples.writeOutAndReadBack(ppt);
ppt.close();

res = (TextBox<?,?>)ppt2.getSlides().get(1).getShapes().get(1);
@@ -887,7 +885,7 @@ public final class TestBugs {
// For the test file, common sl draws textruns one by one and not mixed
// so we evaluate the whole iterator
Map<Attribute, Object> attributes = null;
StringBuffer sb = new StringBuffer();
StringBuilder sb = new StringBuilder();
for (char c = iterator.first();
c != CharacterIterator.DONE;
@@ -994,10 +992,10 @@ public final class TestBugs {
//It isn't pretty, but it works...
private Map<String, String> getMacrosFromHSLF(String fileName) throws IOException {
InputStream is = null;
NPOIFSFileSystem npoifs = null;
POIFSFileSystem npoifs = null;
try {
is = new FileInputStream(POIDataSamples.getSlideShowInstance().getFile(fileName));
npoifs = new NPOIFSFileSystem(is);
npoifs = new POIFSFileSystem(is);
//TODO: should we run the VBAMacroReader on this npoifs?
//TBD: We know that ppt typically don't store macros in the regular place,
//but _can_ they?
@@ -1011,11 +1009,8 @@ public final class TestBugs {
long persistId = vbaAtom.getPersistIdRef();
for (HSLFObjectData objData : ppt.getEmbeddedObjects()) {
if (objData.getExOleObjStg().getPersistId() == persistId) {
VBAMacroReader mr = new VBAMacroReader(objData.getInputStream());
try {
try (VBAMacroReader mr = new VBAMacroReader(objData.getInputStream())) {
return mr.readMacros();
} finally {
mr.close();
}
}
}

+ 6  - 6  src/scratchpad/testcases/org/apache/poi/hslf/usermodel/TestRichTextRun.java

@@ -33,7 +33,7 @@ import java.util.List;
import org.apache.poi.hslf.HSLFTestDataSamples;
import org.apache.poi.hslf.record.Record;
import org.apache.poi.hslf.record.SlideListWithText;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.junit.After;
import org.junit.Before;
@@ -396,7 +396,7 @@ public final class TestRichTextRun {
*/
private static void assertMatchesFileC(HSLFSlideShow s) throws IOException {
// Grab the bytes of the file
NPOIFSFileSystem fs = new NPOIFSFileSystem(HSLFTestDataSamples.openSampleFileStream(filenameC));
POIFSFileSystem fs = new POIFSFileSystem(HSLFTestDataSamples.openSampleFileStream(filenameC));
InputStream is = fs.createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT);
byte[] raw_file = IOUtils.toByteArray(is);
is.close();
@@ -405,7 +405,7 @@ public final class TestRichTextRun {
// Now write out the slideshow
ByteArrayOutputStream baos = new ByteArrayOutputStream();
s.write(baos);
fs = new NPOIFSFileSystem(new ByteArrayInputStream(baos.toByteArray()));
fs = new POIFSFileSystem(new ByteArrayInputStream(baos.toByteArray()));
is = fs.createDocumentInputStream(HSLFSlideShow.POWERPOINT_DOCUMENT);
byte[] raw_ss = IOUtils.toByteArray(is);
is.close();
@@ -515,7 +515,7 @@ public final class TestRichTextRun {
slide.addShape(shape);

assertEquals(42.0, tr.getFontSize(), 0);
assertEquals(true, rt.isBullet());
assertTrue(rt.isBullet());
assertEquals(50.0, rt.getLeftMargin(), 0);
assertEquals(0, rt.getIndent(), 0);
assertEquals('\u263A', (char)rt.getBulletChar());
@@ -530,7 +530,7 @@ public final class TestRichTextRun {
rt = shape.getTextParagraphs().get(0);
tr = rt.getTextRuns().get(0);
assertEquals(42.0, tr.getFontSize(), 0);
assertEquals(true, rt.isBullet());
assertTrue(rt.isBullet());
assertEquals(50.0, rt.getLeftMargin(), 0);
assertEquals(0, rt.getIndent(), 0);
assertEquals('\u263A', (char)rt.getBulletChar());
@@ -615,7 +615,7 @@ public final class TestRichTextRun {
}
@Test
public void testChineseParagraphs() throws Exception {
public void testChineseParagraphs() {
List<HSLFTextRun> rts;
HSLFTextRun rt;
List<List<HSLFTextParagraph>> txt;

+ 7  - 7  src/scratchpad/testcases/org/apache/poi/hsmf/TestFixedSizedProperties.java

@@ -43,7 +43,7 @@ import org.apache.poi.hsmf.datatypes.PropertyValue.LongPropertyValue;
import org.apache.poi.hsmf.datatypes.PropertyValue.TimePropertyValue;
import org.apache.poi.hsmf.dev.HSMFDump;
import org.apache.poi.hsmf.extractor.OutlookTextExtactor;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -58,8 +58,8 @@ public final class TestFixedSizedProperties {
private static final String messageFails = "53784_fails.msg";
private static MAPIMessage mapiMessageSucceeds;
private static MAPIMessage mapiMessageFails;
private static NPOIFSFileSystem fsMessageSucceeds;
private static NPOIFSFileSystem fsMessageFails;
private static POIFSFileSystem fsMessageSucceeds;
private static POIFSFileSystem fsMessageFails;
private static SimpleDateFormat messageDateFormat;
private static TimeZone userTimeZone;

@@ -69,8 +69,8 @@ public final class TestFixedSizedProperties {
@BeforeClass
public static void initMapi() throws Exception {
POIDataSamples samples = POIDataSamples.getHSMFInstance();
fsMessageSucceeds = new NPOIFSFileSystem(samples.getFile(messageSucceeds));
fsMessageFails = new NPOIFSFileSystem(samples.getFile(messageFails));
fsMessageSucceeds = new POIFSFileSystem(samples.getFile(messageSucceeds));
fsMessageFails = new POIFSFileSystem(samples.getFile(messageFails));

mapiMessageSucceeds = new MAPIMessage(fsMessageSucceeds);
mapiMessageFails = new MAPIMessage(fsMessageFails);
@@ -95,7 +95,7 @@ public final class TestFixedSizedProperties {
* of our test files
*/
@Test
public void testPropertiesFound() throws Exception {
public void testPropertiesFound() {
Map<MAPIProperty,List<PropertyValue>> props;
props = mapiMessageSucceeds.getMainChunks().getProperties();
@@ -109,7 +109,7 @@ public final class TestFixedSizedProperties {
* Check we find properties of a variety of different types
*/
@Test
public void testPropertyValueTypes() throws Exception {
public void testPropertyValueTypes() {
Chunks mainChunks = mapiMessageSucceeds.getMainChunks();
// Ask to have the values looked up

+ 16  - 12  src/scratchpad/testcases/org/apache/poi/hsmf/extractor/TestOutlookTextExtractor.java

@@ -29,7 +29,7 @@ import java.util.TimeZone;

import org.apache.poi.POIDataSamples;
import org.apache.poi.hsmf.MAPIMessage;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -56,7 +56,7 @@ public final class TestOutlookTextExtractor {
@Test
public void testQuick() throws Exception {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("quick.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
@@ -81,7 +81,7 @@ public final class TestOutlookTextExtractor {
@Test
public void testSimple() throws Exception {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
@@ -107,7 +107,7 @@ public final class TestOutlookTextExtractor {
ext.close();
fis.close();

NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
ext = new OutlookTextExtactor(poifs);
String poifsTxt = ext.getText();
ext.close();
@@ -141,7 +141,7 @@ public final class TestOutlookTextExtractor {
"example_sent_regular.msg", "example_sent_unicode.msg"
};
for(String file : files) {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile(file), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile(file), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
@@ -181,7 +181,7 @@ public final class TestOutlookTextExtractor {
"example_received_regular.msg", "example_received_unicode.msg"
};
for(String file : files) {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile(file), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile(file), true);
MAPIMessage msg = new MAPIMessage(poifs);

@@ -204,10 +204,12 @@ public final class TestOutlookTextExtractor {
}
/**
* See also {@link org.apache.poi.extractor.TestExtractorFactory#testEmbeded()}
* See also {@link org.apache.poi.extractor.ooxml.TestExtractorFactory#testEmbeded()}
*/
@SuppressWarnings("JavadocReference")
@Test
public void testWithAttachments() throws Exception {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("attachment_test_msg.msg"), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("attachment_test_msg.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
@@ -230,9 +232,10 @@ public final class TestOutlookTextExtractor {
ext.close();
poifs.close();
}

@Test
public void testWithAttachedMessage() throws Exception {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("58214_with_attachment.msg"), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("58214_with_attachment.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
String text = ext.getText();
@@ -248,9 +251,10 @@ public final class TestOutlookTextExtractor {
ext.close();
poifs.close();
}

@Test
public void testEncodings() throws Exception {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("chinese-traditional.msg"), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("chinese-traditional.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);
OutlookTextExtactor ext = new OutlookTextExtactor(msg);
String text = ext.getText();

+ 14  - 12  src/scratchpad/testcases/org/apache/poi/hsmf/parsers/TestPOIFSChunkParser.java

@@ -20,9 +20,11 @@ package org.apache.poi.hsmf.parsers;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Calendar;

@@ -38,7 +40,7 @@ import org.apache.poi.hsmf.datatypes.RecipientChunks.RecipientChunksSorter;
import org.apache.poi.hsmf.datatypes.StringChunk;
import org.apache.poi.hsmf.datatypes.Types;
import org.apache.poi.hsmf.exceptions.ChunkNotFoundException;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.junit.Test;

@@ -50,7 +52,7 @@ public final class TestPOIFSChunkParser {

@Test
public void testFindsCore() throws IOException, ChunkNotFoundException {
NPOIFSFileSystem simple = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
POIFSFileSystem simple = new POIFSFileSystem(samples.getFile("quick.msg"), true);

// Check a few core things are present
simple.getRoot().getEntry(
@@ -77,7 +79,7 @@ public final class TestPOIFSChunkParser {

@Test
public void testFindsRecips() throws IOException, ChunkNotFoundException {
NPOIFSFileSystem simple = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
POIFSFileSystem simple = new POIFSFileSystem(samples.getFile("quick.msg"), true);

simple.getRoot().getEntry("__recip_version1.0_#00000000");

@@ -92,7 +94,7 @@ public final class TestPOIFSChunkParser {
assertEquals("/O=HOSTEDSERVICE2/OU=FIRST ADMINISTRATIVE GROUP/CN=RECIPIENTS/CN=Kevin.roast@ben",
recips.recipientEmailChunk.getValue());

String search = new String(recips.recipientSearchChunk.getValue(), "ASCII");
String search = new String(recips.recipientSearchChunk.getValue(), StandardCharsets.US_ASCII);
assertEquals("CN=KEVIN.ROAST@BEN\0", search.substring(search.length()-19));

// Now via MAPIMessage
@@ -116,14 +118,14 @@ public final class TestPOIFSChunkParser {


// Now look at another message
simple = new NPOIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
simple = new POIFSFileSystem(samples.getFile("simple_test_msg.msg"), true);
msg = new MAPIMessage(simple);
assertNotNull(msg.getRecipientDetailsChunks());
assertEquals(1, msg.getRecipientDetailsChunks().length);

assertEquals("SMTP", msg.getRecipientDetailsChunks()[0].deliveryTypeChunk.getValue());
assertEquals(null, msg.getRecipientDetailsChunks()[0].recipientSMTPChunk);
assertEquals(null, msg.getRecipientDetailsChunks()[0].recipientNameChunk);
assertNull(msg.getRecipientDetailsChunks()[0].recipientSMTPChunk);
assertNull(msg.getRecipientDetailsChunks()[0].recipientNameChunk);
assertEquals("travis@overwrittenstack.com", msg.getRecipientDetailsChunks()[0].recipientEmailChunk.getValue());
assertEquals("travis@overwrittenstack.com", msg.getRecipientEmailAddress());

@@ -133,7 +135,7 @@ public final class TestPOIFSChunkParser {

@Test
public void testFindsMultipleRecipients() throws IOException, ChunkNotFoundException {
NPOIFSFileSystem multiple = new NPOIFSFileSystem(samples.getFile("example_received_unicode.msg"), true);
POIFSFileSystem multiple = new POIFSFileSystem(samples.getFile("example_received_unicode.msg"), true);

multiple.getRoot().getEntry("__recip_version1.0_#00000000");
multiple.getRoot().getEntry("__recip_version1.0_#00000001");
@@ -226,7 +228,7 @@ public final class TestPOIFSChunkParser {

@Test
public void testFindsNameId() throws IOException {
NPOIFSFileSystem simple = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
POIFSFileSystem simple = new POIFSFileSystem(samples.getFile("quick.msg"), true);

simple.getRoot().getEntry("__nameid_version1.0");

@@ -250,8 +252,8 @@ public final class TestPOIFSChunkParser {

@Test
public void testFindsAttachments() throws IOException, ChunkNotFoundException {
NPOIFSFileSystem with = new NPOIFSFileSystem(samples.getFile("attachment_test_msg.msg"), true);
NPOIFSFileSystem without = new NPOIFSFileSystem(samples.getFile("quick.msg"), true);
POIFSFileSystem with = new POIFSFileSystem(samples.getFile("attachment_test_msg.msg"), true);
POIFSFileSystem without = new POIFSFileSystem(samples.getFile("quick.msg"), true);
AttachmentChunks attachment;


@@ -326,7 +328,7 @@ public final class TestPOIFSChunkParser {
*/
@Test
public void testOlk10SideProps() throws IOException, ChunkNotFoundException {
NPOIFSFileSystem poifs = new NPOIFSFileSystem(samples.getFile("51873.msg"), true);
POIFSFileSystem poifs = new POIFSFileSystem(samples.getFile("51873.msg"), true);
MAPIMessage msg = new MAPIMessage(poifs);

// Check core details came through

+ 2  - 3  src/scratchpad/testcases/org/apache/poi/hwpf/extractor/TestWordExtractor.java

@@ -34,7 +34,6 @@ import org.apache.poi.hwpf.HWPFTestDataSamples;
import org.apache.poi.hwpf.OldWordFileFormatException;
import org.apache.poi.poifs.filesystem.DirectoryNode;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.StringUtil;
import org.junit.Test;
@@ -338,13 +337,13 @@ public final class TestWordExtractor {
/**
* Tests that we can work with both {@link POIFSFileSystem}
* and {@link NPOIFSFileSystem}
* and {@link POIFSFileSystem}
*/
@Test
public void testDifferentPOIFS() throws Exception {
// Open the two filesystems
File file = docTests.getFile("test2.doc");
try (NPOIFSFileSystem npoifs = new NPOIFSFileSystem(file, true)) {
try (POIFSFileSystem npoifs = new POIFSFileSystem(file, true)) {

DirectoryNode dir = npoifs.getRoot();
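A minimal sketch of the same flow outside the test harness, assuming a local .doc file; the path and class name are illustrative.

    import java.io.File;

    import org.apache.poi.hwpf.HWPFDocument;
    import org.apache.poi.hwpf.extractor.WordExtractor;
    import org.apache.poi.poifs.filesystem.DirectoryNode;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class ExtractDocTextSketch {
        public static void main(String[] args) throws Exception {
            // Open the .doc read-only and hand its root directory to HWPF
            try (POIFSFileSystem fs = new POIFSFileSystem(new File("test2.doc"), true)) {
                DirectoryNode dir = fs.getRoot();
                try (HWPFDocument doc = new HWPFDocument(dir);
                     WordExtractor extractor = new WordExtractor(doc)) {
                    System.out.println(extractor.getText());
                }
            }
        }
    }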


+2 -2   src/scratchpad/testcases/org/apache/poi/hwpf/extractor/TestWordExtractorBugs.java

@@ -20,7 +20,7 @@ package org.apache.poi.hwpf.extractor;
import org.apache.poi.POIDataSamples;
import org.apache.poi.extractor.POITextExtractor;
import org.apache.poi.extractor.OLE2ExtractorFactory;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.junit.Test;

import java.io.IOException;
@@ -60,7 +60,7 @@ public final class TestWordExtractorBugs {

@Test
public void testBug60374() throws Exception {
NPOIFSFileSystem fs = new NPOIFSFileSystem(SAMPLES.openResourceAsStream("cn.orthodox.www_divenbog_APRIL_30-APRIL.DOC"));
POIFSFileSystem fs = new POIFSFileSystem(SAMPLES.openResourceAsStream("cn.orthodox.www_divenbog_APRIL_30-APRIL.DOC"));
final POITextExtractor extractor = OLE2ExtractorFactory.createExtractor(fs);

// Check it gives text without error
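Roughly the same factory route as a standalone sketch (the input file name is hypothetical):

    import java.io.FileInputStream;
    import java.io.InputStream;

    import org.apache.poi.extractor.OLE2ExtractorFactory;
    import org.apache.poi.extractor.POITextExtractor;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class FactoryExtractSketch {
        public static void main(String[] args) throws Exception {
            try (InputStream is = new FileInputStream("some-word-file.doc");
                 POIFSFileSystem fs = new POIFSFileSystem(is)) {
                // The factory inspects the POIFS contents and returns a matching extractor
                try (POITextExtractor extractor = OLE2ExtractorFactory.createExtractor(fs)) {
                    System.out.println(extractor.getText());
                }
            }
        }
    }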

+22 -36   src/scratchpad/testcases/org/apache/poi/hwpf/usermodel/TestBugs.java

@@ -25,6 +25,7 @@ import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
@@ -39,7 +40,6 @@ import org.apache.poi.hwpf.converter.WordToTextConverter;
import org.apache.poi.hwpf.extractor.Word6Extractor;
import org.apache.poi.hwpf.extractor.WordExtractor;
import org.apache.poi.hwpf.model.*;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.POILogFactory;
@@ -56,8 +56,7 @@ public class TestBugs{

private static final POILogger logger = POILogFactory.getLogger(TestBugs.class);

public static void assertEqualsIgnoreNewline(String expected, String actual )
{
private static void assertEqualsIgnoreNewline(String expected, String actual) {
String newExpected = expected.replaceAll("\r\n", "\n" )
.replaceAll("\r", "\n").trim();
String newActual = actual.replaceAll("\r\n", "\n" )
@@ -110,17 +109,6 @@ public class TestBugs{
doc.close();
}
}
private String getTextOldFile(String samplefile) throws IOException {
HWPFOldDocument doc = HWPFTestDataSamples.openOldSampleFile(samplefile);
Word6Extractor extractor = new Word6Extractor(doc);
try {
return extractor.getText();
} finally {
extractor.close();
doc.close();
}
}

/**
* Bug 33519 - HWPF fails to read a file
@@ -448,7 +436,7 @@ public class TestBugs{
try (InputStream is = POIDataSamples.getDocumentInstance()
.openResourceAsStream("Bug47742-text.txt")) {
byte[] expectedBytes = IOUtils.toByteArray(is);
String expectedText = new String(expectedBytes, "utf-8")
String expectedText = new String(expectedBytes, StandardCharsets.UTF_8)
.substring(1); // strip-off the unicode marker

assertEqualsIgnoreNewline(expectedText, foundText);
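The StandardCharsets change here (like the US_ASCII change in TestPOIFSChunkParser above) swaps the string-named overload for the Charset overload, which declares no checked UnsupportedEncodingException and cannot misspell the encoding name. A trivial, self-contained illustration:

    import java.nio.charset.StandardCharsets;

    public class CharsetOverloadSketch {
        public static void main(String[] args) {
            byte[] bytes = "example".getBytes(StandardCharsets.UTF_8);
            // Unlike new String(bytes, "utf-8"), this overload cannot throw
            // UnsupportedEncodingException at runtime
            System.out.println(new String(bytes, StandardCharsets.UTF_8));
        }
    }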
@@ -486,11 +474,11 @@ public class TestBugs{
}

@Test
public void test49933() throws IOException
{
String text = getTextOldFile("Bug49933.doc");
assertContains(text, "best.wine.jump.ru");
public void test49933() throws IOException {
try (HWPFOldDocument doc = HWPFTestDataSamples.openOldSampleFile("Bug49933.doc");
Word6Extractor extractor = new Word6Extractor(doc)) {
assertContains(extractor.getText(), "best.wine.jump.ru");
}
}

/**
@@ -544,8 +532,7 @@ public class TestBugs{
* release from download site )
*/
@Test
public void test51604p2() throws Exception
{
public void test51604p2() {
HWPFDocument doc = HWPFTestDataSamples.openSampleFile("Bug51604.doc");

Range range = doc.getRange();
@@ -627,7 +614,7 @@ public class TestBugs{
{
InputStream is = POIDataSamples.getDocumentInstance()
.openResourceAsStream("empty.doc");
try (NPOIFSFileSystem npoifsFileSystem = new NPOIFSFileSystem(is)) {
try (POIFSFileSystem npoifsFileSystem = new POIFSFileSystem(is)) {
HWPFDocument hwpfDocument = new HWPFDocument(
npoifsFileSystem.getRoot());
hwpfDocument.write(new ByteArrayOutputStream());
@@ -679,8 +666,7 @@ public class TestBugs{
* corrupt document
*/
@Test
public void testBug51834() throws Exception
{
public void testBug51834() {
/*
* we don't have Java test for this file - it should be checked using
* Microsoft BFF Validator. But check read-write-read anyway. -- sergey
@@ -773,7 +759,7 @@ public class TestBugs{
* Disabled pending a fix for the bug
*/
@Test
public void test56880() throws Exception {
public void test56880() {
HWPFDocument doc =
HWPFTestDataSamples.openSampleFile("56880.doc");
assertEqualsIgnoreNewline("Check Request", doc.getRange().text());
@@ -787,20 +773,12 @@ public class TestBugs{
{
assertNotNull(getText("Bug61268.doc"));
}
// These are the values the are expected to be read when the file
// is checked.
private final int section1LeftMargin = 1440;
private final int section1RightMargin = 1440;
private final int section1TopMargin = 1440;
private final int section1BottomMargin = 1440;
private final int section1NumColumns = 1;

private int section2LeftMargin = 1440;
private int section2RightMargin = 1440;
private int section2TopMargin = 1440;
private int section2BottomMargin = 1440;
private final int section2NumColumns = 3;

@Test
@SuppressWarnings("SuspiciousNameCombination")
public void testHWPFSections() {
@@ -854,10 +832,17 @@ public class TestBugs{

@SuppressWarnings("Duplicates")
private void assertSection1Margin(Section section) {
int section1BottomMargin = 1440;
assertEquals(section1BottomMargin, section.getMarginBottom());
// These are the values the are expected to be read when the file
// is checked.
int section1LeftMargin = 1440;
assertEquals(section1LeftMargin, section.getMarginLeft());
int section1RightMargin = 1440;
assertEquals(section1RightMargin, section.getMarginRight());
int section1TopMargin = 1440;
assertEquals(section1TopMargin, section.getMarginTop());
int section1NumColumns = 1;
assertEquals(section1NumColumns, section.getNumColumns());
}

@@ -867,6 +852,7 @@ public class TestBugs{
assertEquals(section2LeftMargin, section.getMarginLeft());
assertEquals(section2RightMargin, section.getMarginRight());
assertEquals(section2TopMargin, section.getMarginTop());
int section2NumColumns = 3;
assertEquals(section2NumColumns, section.getNumColumns());
}


+3 -4   src/scratchpad/testcases/org/apache/poi/hwpf/usermodel/TestHWPFWrite.java

@@ -31,7 +31,6 @@ import org.apache.poi.POIDataSamples;
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.hwpf.HWPFTestCase;
import org.apache.poi.hwpf.HWPFTestDataSamples;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.TempFile;
@@ -104,7 +103,7 @@ public final class TestHWPFWrite extends HWPFTestCase {
}

// Open from the temp file in read-write mode
NPOIFSFileSystem poifs = new NPOIFSFileSystem(file, false);
POIFSFileSystem poifs = new POIFSFileSystem(file, false);
HWPFDocument doc = new HWPFDocument(poifs.getRoot());
Range r = doc.getRange();
assertEquals("I am a test document\r", r.getParagraph(0).text());
@@ -117,7 +116,7 @@ public final class TestHWPFWrite extends HWPFTestCase {
doc.close();
poifs.close();

poifs = new NPOIFSFileSystem(file);
poifs = new POIFSFileSystem(file);
doc = new HWPFDocument(poifs.getRoot());
r = doc.getRange();
assertEquals("X XX a test document\r", r.getParagraph(0).text());
@@ -138,7 +137,7 @@ public final class TestHWPFWrite extends HWPFTestCase {
@Test(expected=IllegalStateException.class)
public void testInvalidInPlaceWriteNPOIFS() throws Exception {
// Can't work for Read-Only files
NPOIFSFileSystem fs = new NPOIFSFileSystem(SAMPLES.getFile("SampleDoc.doc"), true);
POIFSFileSystem fs = new POIFSFileSystem(SAMPLES.getFile("SampleDoc.doc"), true);
HWPFDocument doc = new HWPFDocument(fs.getRoot());
try {
doc.write();
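A minimal sketch of the read-write and in-place-write flow this file's tests cover; the file path is hypothetical.

    import java.io.File;

    import org.apache.poi.hwpf.HWPFDocument;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    public class InPlaceWriteSketch {
        public static void main(String[] args) throws Exception {
            // Read-write mode (readOnly = false) is what allows the in-place write below;
            // the hunk above shows the read-only case failing with IllegalStateException
            POIFSFileSystem poifs = new POIFSFileSystem(new File("writable-copy.doc"), false);
            HWPFDocument doc = new HWPFDocument(poifs.getRoot());
            doc.write();   // write back into the same open POIFS
            doc.close();
            poifs.close();
        }
    }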

+0 -0   src/testcases/org/apache/poi/TestPOIDocumentMain.java


Some files were not shown because too many files changed in this diff
