*/
public final class HSLFSlideShowImpl extends POIDocument implements Closeable {
public static final int UNSET_OFFSET = -1;
-
+
// For logging
private POILogger logger = POILogFactory.getLogger(this.getClass());
- // Holds metadata on where things are in our document
- private CurrentUserAtom currentUser;
+ // Holds metadata on where things are in our document
+ private CurrentUserAtom currentUser;
- // Low level contents of the file
- private byte[] _docstream;
+ // Low level contents of the file
+ private byte[] _docstream;
- // Low level contents
- private Record[] _records;
+ // Low level contents
+ private Record[] _records;
- // Raw Pictures contained in the pictures stream
- private List<HSLFPictureData> _pictures;
+ // Raw Pictures contained in the pictures stream
+ private List<HSLFPictureData> _pictures;
// Embedded objects stored in storage records in the document stream, lazily populated.
private HSLFObjectData[] _objects;
-
+
/**
- * Returns the directory in the underlying POIFSFileSystem for the
- * document that is open.
+ * Returns the directory in the underlying POIFSFileSystem for the
+ * document that is open.
*/
protected DirectoryNode getPOIFSDirectory() {
- return directory;
+ return directory;
}
/**
 * Constructs a Powerpoint document from fileName. Parses the document
 * and places all the important stuff into data structures.
 *
 * @param fileName The name of the file to read.
 * @throws IOException if there is a problem while parsing the document.
 */
@SuppressWarnings("resource")
public HSLFSlideShowImpl(String fileName) throws IOException {
    this(new POIFSFileSystem(new File(fileName)));
}

/**
 * Constructs a Powerpoint document from an input stream. Parses the
 * document and places all the important stuff into data structures.
 *
 * @param inputStream the source of the data
 * @throws IOException if there is a problem while parsing the document.
 */
@SuppressWarnings("resource")
public HSLFSlideShowImpl(InputStream inputStream) throws IOException {
    //do Ole stuff
    this(new POIFSFileSystem(inputStream));
}

/**
 * Constructs a Powerpoint document from a POIFS Filesystem. Parses the
 * document and places all the important stuff into data structures.
 *
 * @param filesystem the POIFS FileSystem to read from
 * @throws IOException if there is a problem while parsing the document.
 */
public HSLFSlideShowImpl(POIFSFileSystem filesystem) throws IOException {
    this(filesystem.getRoot());
}
/**
* Constructs a Powerpoint document from a POIFS Filesystem. Parses the
this(filesystem.getRoot());
}
/**
 * Constructs a Powerpoint document from a specific point in a
 * POIFS Filesystem. Parses the document and places all the
 * important stuff into data structures.
 *
 * @param dir the POIFS directory to read from
 * @throws IOException if there is a problem while parsing the document.
 */
public HSLFSlideShowImpl(DirectoryNode dir) throws IOException {
    super(handleDualStorage(dir));

    // First up, grab the "Current User" stream
    // We need this before we can detect Encrypted Documents
    readCurrentUserStream();

    // Next up, grab the data that makes up the
    // PowerPoint stream
    readPowerPointStream();

    // Now, build records based on the PowerPoint stream
    buildRecords();

    // Look for any other streams
    readOtherStreams();
}

/**
 * If the directory contains a "PP97_DUALSTORAGE" entry, descend into it;
 * the outer document of a dual-storage file usually can't be read.
 *
 * @param dir the directory the document was opened from
 * @return the dual-storage sub-directory if present, otherwise {@code dir}
 * @throws IOException if the dual-storage entry can't be opened
 */
private static DirectoryNode handleDualStorage(DirectoryNode dir) throws IOException {
    // when there's a dual storage entry, use it, as the outer document can't be read quite probably ...
    String dualName = "PP97_DUALSTORAGE";
    if (!dir.hasEntry(dualName)) {
        return dir;
    }
    return (DirectoryNode) dir.getEntry(dualName);
}

/**
 * Constructs a new, empty, Powerpoint document.
 */
public static final HSLFSlideShowImpl create() {
    // Template document shipped with the library; parsing it yields an empty show
    InputStream is = HSLFSlideShowImpl.class.getResourceAsStream("/org/apache/poi/hslf/data/empty.ppt");
    if (is == null) {
        throw new HSLFException("Missing resource 'empty.ppt'");
    }
    try {
        try {
            return new HSLFSlideShowImpl(is);
        } finally {
            is.close();
        }
    } catch (IOException e) {
        throw new HSLFException(e);
    }
}
+
+ /**
+ * Extracts the main PowerPoint document stream from the
+ * POI file, ready to be passed
+ *
+ * @throws IOException
+ */
+ private void readPowerPointStream() throws IOException {
+ // Get the main document stream
+ DocumentEntry docProps =
+ (DocumentEntry) directory.getEntry("PowerPoint Document");
+
+ // Grab the document stream
+ int len = docProps.getSize();
InputStream is = directory.createDocumentInputStream("PowerPoint Document");
try {
_docstream = IOUtils.toByteArray(is, len);
} finally {
is.close();
}
- }
-
- /**
- * Builds the list of records, based on the contents
- * of the PowerPoint stream
- */
- private void buildRecords()
- {
- // The format of records in a powerpoint file are:
- // <little endian 2 byte "info">
- // <little endian 2 byte "type">
- // <little endian 4 byte "length">
- // If it has a zero length, following it will be another record
- // <xx xx yy yy 00 00 00 00> <xx xx yy yy zz zz zz zz>
- // If it has a length, depending on its type it may have children or data
- // If it has children, these will follow straight away
- // <xx xx yy yy zz zz zz zz <xx xx yy yy zz zz zz zz>>
- // If it has data, this will come straigh after, and run for the length
- // <xx xx yy yy zz zz zz zz dd dd dd dd dd dd dd>
- // All lengths given exclude the 8 byte record header
- // (Data records are known as Atoms)
-
- // Document should start with:
- // 0F 00 E8 03 ## ## ## ##
- // (type 1000 = document, info 00 0f is normal, rest is document length)
- // 01 00 E9 03 28 00 00 00
- // (type 1001 = document atom, info 00 01 normal, 28 bytes long)
- // 80 16 00 00 E0 10 00 00 xx xx xx xx xx xx xx xx
- // 05 00 00 00 0A 00 00 00 xx xx xx
- // (the contents of the document atom, not sure what it means yet)
- // (records then follow)
-
- // When parsing a document, look to see if you know about that type
- // of the current record. If you know it's a type that has children,
- // process the record's data area looking for more records
- // If you know about the type and it doesn't have children, either do
- // something with the data (eg TextRun) or skip over it
- // If you don't know about the type, play safe and skip over it (using
- // its length to know where the next record will start)
- //
-
- _records = read(_docstream, (int)currentUser.getCurrentEditOffset());
- }
-
- private Record[] read(byte[] docstream, int usrOffset){
+ }
+
+ /**
+ * Builds the list of records, based on the contents
+ * of the PowerPoint stream
+ */
+ private void buildRecords() {
+ // The format of records in a powerpoint file are:
+ // <little endian 2 byte "info">
+ // <little endian 2 byte "type">
+ // <little endian 4 byte "length">
+ // If it has a zero length, following it will be another record
+ // <xx xx yy yy 00 00 00 00> <xx xx yy yy zz zz zz zz>
+ // If it has a length, depending on its type it may have children or data
+ // If it has children, these will follow straight away
+ // <xx xx yy yy zz zz zz zz <xx xx yy yy zz zz zz zz>>
+ // If it has data, this will come straigh after, and run for the length
+ // <xx xx yy yy zz zz zz zz dd dd dd dd dd dd dd>
+ // All lengths given exclude the 8 byte record header
+ // (Data records are known as Atoms)
+
+ // Document should start with:
+ // 0F 00 E8 03 ## ## ## ##
+ // (type 1000 = document, info 00 0f is normal, rest is document length)
+ // 01 00 E9 03 28 00 00 00
+ // (type 1001 = document atom, info 00 01 normal, 28 bytes long)
+ // 80 16 00 00 E0 10 00 00 xx xx xx xx xx xx xx xx
+ // 05 00 00 00 0A 00 00 00 xx xx xx
+ // (the contents of the document atom, not sure what it means yet)
+ // (records then follow)
+
+ // When parsing a document, look to see if you know about that type
+ // of the current record. If you know it's a type that has children,
+ // process the record's data area looking for more records
+ // If you know about the type and it doesn't have children, either do
+ // something with the data (eg TextRun) or skip over it
+ // If you don't know about the type, play safe and skip over it (using
+ // its length to know where the next record will start)
+ //
+
+ _records = read(_docstream, (int) currentUser.getCurrentEditOffset());
+ }
+
+ private Record[] read(byte[] docstream, int usrOffset) {
//sort found records by offset.
//(it is not necessary but SlideShow.findMostRecentCoreRecords() expects them sorted)
- NavigableMap<Integer,Record> records = new TreeMap<Integer,Record>(); // offset -> record
- Map<Integer,Integer> persistIds = new HashMap<Integer,Integer>(); // offset -> persistId
+ NavigableMap<Integer, Record> records = new TreeMap<Integer, Record>(); // offset -> record
+ Map<Integer, Integer> persistIds = new HashMap<Integer, Integer>(); // offset -> persistId
initRecordOffsets(docstream, usrOffset, records, persistIds);
HSLFSlideShowEncrypted decryptData = new HSLFSlideShowEncrypted(docstream, records);
-
- for (Map.Entry<Integer,Record> entry : records.entrySet()) {
+
+ for (Map.Entry<Integer, Record> entry : records.entrySet()) {
Integer offset = entry.getKey();
Record record = entry.getValue();
Integer persistId = persistIds.get(offset);
record = Record.buildRecordAtOffset(docstream, offset);
entry.setValue(record);
}
-
+
if (record instanceof PersistRecord) {
- ((PersistRecord)record).setPersistId(persistId);
- }
+ ((PersistRecord) record).setPersistId(persistId);
+ }
}
-
+
return records.values().toArray(new Record[records.size()]);
}
- private void initRecordOffsets(byte[] docstream, int usrOffset, NavigableMap<Integer,Record> recordMap, Map<Integer,Integer> offset2id) {
- while (usrOffset != 0){
+ private void initRecordOffsets(byte[] docstream, int usrOffset, NavigableMap<Integer, Record> recordMap, Map<Integer, Integer> offset2id) {
+ while (usrOffset != 0) {
UserEditAtom usr = (UserEditAtom) Record.buildRecordAtOffset(docstream, usrOffset);
recordMap.put(usrOffset, usr);
-
+
int psrOffset = usr.getPersistPointersOffset();
- PersistPtrHolder ptr = (PersistPtrHolder)Record.buildRecordAtOffset(docstream, psrOffset);
+ PersistPtrHolder ptr = (PersistPtrHolder) Record.buildRecordAtOffset(docstream, psrOffset);
recordMap.put(psrOffset, ptr);
-
- for(Map.Entry<Integer,Integer> entry : ptr.getSlideLocationsLookup().entrySet()) {
+
+ for (Map.Entry<Integer, Integer> entry : ptr.getSlideLocationsLookup().entrySet()) {
Integer offset = entry.getValue();
Integer id = entry.getKey();
recordMap.put(offset, null); // reserve a slot for the record
offset2id.put(offset, id);
}
-
+
usrOffset = usr.getLastUserEditAtomOffset();
// check for corrupted user edit atom and try to repair it
// if the next user edit atom offset is already known, we would go into an endless loop
if (usrOffset > 0 && recordMap.containsKey(usrOffset)) {
// a user edit atom is usually located 36 byte before the smallest known record offset
- usrOffset = recordMap.firstKey()-36;
+ usrOffset = recordMap.firstKey() - 36;
// check that we really are located on a user edit atom
int ver_inst = LittleEndian.getUShort(docstream, usrOffset);
- int type = LittleEndian.getUShort(docstream, usrOffset+2);
- int len = LittleEndian.getInt(docstream, usrOffset+4);
+ int type = LittleEndian.getUShort(docstream, usrOffset + 2);
+ int len = LittleEndian.getInt(docstream, usrOffset + 4);
if (ver_inst == 0 && type == 4085 && (len == 0x1C || len == 0x20)) {
logger.log(POILogger.WARN, "Repairing invalid user edit atom");
usr.setLastUserEditAtomOffset(usrOffset);
throw new CorruptPowerPointFileException("Powerpoint document contains invalid user edit atom");
}
}
- }
+ }
}
public DocumentEncryptionAtom getDocumentEncryptionAtom() {
for (Record r : _records) {
if (r instanceof DocumentEncryptionAtom) {
- return (DocumentEncryptionAtom)r;
+ return (DocumentEncryptionAtom) r;
}
}
return null;
}
-
-
- /**
- * Find the "Current User" stream, and load it
- */
- private void readCurrentUserStream() {
- try {
- currentUser = new CurrentUserAtom(directory);
- } catch(IOException ie) {
- logger.log(POILogger.ERROR, "Error finding Current User Atom:\n" + ie);
- currentUser = new CurrentUserAtom();
- }
- }
-
- /**
- * Find any other streams from the filesystem, and load them
- */
- private void readOtherStreams() {
- // Currently, there aren't any
- }
-
- /**
- * Find and read in pictures contained in this presentation.
- * This is lazily called as and when we want to touch pictures.
- */
- private void readPictures() throws IOException {
+
+
+ /**
+ * Find the "Current User" stream, and load it
+ */
+ private void readCurrentUserStream() {
+ try {
+ currentUser = new CurrentUserAtom(directory);
+ } catch (IOException ie) {
+ logger.log(POILogger.ERROR, "Error finding Current User Atom:\n" + ie);
+ currentUser = new CurrentUserAtom();
+ }
+ }
+
+ /**
+ * Find any other streams from the filesystem, and load them
+ */
+ private void readOtherStreams() {
+ // Currently, there aren't any
+ }
+
+ /**
+ * Find and read in pictures contained in this presentation.
+ * This is lazily called as and when we want to touch pictures.
+ */
+ private void readPictures() throws IOException {
_pictures = new ArrayList<HSLFPictureData>();
// if the presentation doesn't contain pictures - will use a null set instead
if (!directory.hasEntry("Pictures")) return;
HSLFSlideShowEncrypted decryptData = new HSLFSlideShowEncrypted(getDocumentEncryptionAtom());
-
- DocumentEntry entry = (DocumentEntry)directory.getEntry("Pictures");
+
+ DocumentEntry entry = (DocumentEntry) directory.getEntry("Pictures");
DocumentInputStream is = directory.createDocumentInputStream(entry);
- byte[] pictstream = IOUtils.toByteArray(is, entry.getSize());
- is.close();
+ byte[] pictstream = IOUtils.toByteArray(is, entry.getSize());
+ is.close();
int pos = 0;
- // An empty picture record (length 0) will take up 8 bytes
- while (pos <= (pictstream.length-8)) {
+ // An empty picture record (length 0) will take up 8 bytes
+ while (pos <= (pictstream.length - 8)) {
int offset = pos;
decryptData.decryptPicture(pictstream, offset);
-
+
// Image signature
int signature = LittleEndian.getUShort(pictstream, pos);
pos += LittleEndian.SHORT_SIZE;
if (!((type == 0xf007) || (type >= 0xf018 && type <= 0xf117)))
break;
- // The image size must be 0 or greater
- // (0 is allowed, but odd, since we do wind on by the header each
- // time, so we won't get stuck)
- if(imgsize < 0) {
- throw new CorruptPowerPointFileException("The file contains a picture, at position " + _pictures.size() + ", which has a negatively sized data length, so we can't trust any of the picture data");
- }
-
- // If they type (including the bonus 0xF018) is 0, skip it
- PictureType pt = PictureType.forNativeID(type - 0xF018);
- if (pt == null) {
- logger.log(POILogger.ERROR, "Problem reading picture: Invalid image type 0, on picture with length " + imgsize + ".\nYou document will probably become corrupted if you save it!");
- logger.log(POILogger.ERROR, "" + pos);
- } else {
- // Build the PictureData object from the data
+ // The image size must be 0 or greater
+ // (0 is allowed, but odd, since we do wind on by the header each
+ // time, so we won't get stuck)
+ if (imgsize < 0) {
+ throw new CorruptPowerPointFileException("The file contains a picture, at position " + _pictures.size() + ", which has a negatively sized data length, so we can't trust any of the picture data");
+ }
+
+ // If they type (including the bonus 0xF018) is 0, skip it
+ PictureType pt = PictureType.forNativeID(type - 0xF018);
+ if (pt == null) {
+ logger.log(POILogger.ERROR, "Problem reading picture: Invalid image type 0, on picture with length " + imgsize + ".\nYou document will probably become corrupted if you save it!");
+ logger.log(POILogger.ERROR, "" + pos);
+ } else {
+ // Build the PictureData object from the data
try {
- HSLFPictureData pict = HSLFPictureData.create(pt);
- pict.setSignature(signature);
+ HSLFPictureData pict = HSLFPictureData.create(pt);
+ pict.setSignature(signature);
// Copy the data, ready to pass to PictureData
byte[] imgdata = new byte[imgsize];
pict.setOffset(offset);
pict.setIndex(_pictures.size());
- _pictures.add(pict);
- } catch(IllegalArgumentException e) {
- logger.log(POILogger.ERROR, "Problem reading picture: " + e + "\nYou document will probably become corrupted if you save it!");
- }
- }
+ _pictures.add(pict);
+ } catch (IllegalArgumentException e) {
+ logger.log(POILogger.ERROR, "Problem reading picture: " + e + "\nYou document will probably become corrupted if you save it!");
+ }
+ }
pos += imgsize;
}
- }
-
+ }
+
/**
* remove duplicated UserEditAtoms and merge PersistPtrHolder, i.e.
* remove document edit history
}
_records = HSLFSlideShowEncrypted.normalizeRecords(_records);
}
-
-
- /**
+
+
+ /**
* This is a helper functions, which is needed for adding new position dependent records
* or finally write the slideshow to a file.
- *
- * @param os the stream to write to, if null only the references are updated
- * @param interestingRecords a map of interesting records (PersistPtrHolder and UserEditAtom)
- * referenced by their RecordType. Only the very last of each type will be saved to the map.
- * May be null, if not needed.
- * @throws IOException
- */
- public void updateAndWriteDependantRecords(OutputStream os, Map<RecordTypes,PositionDependentRecord> interestingRecords)
- throws IOException {
+ *
+ * @param os the stream to write to, if null only the references are updated
+ * @param interestingRecords a map of interesting records (PersistPtrHolder and UserEditAtom)
+ * referenced by their RecordType. Only the very last of each type will be saved to the map.
+ * May be null, if not needed.
+ * @throws IOException
+ */
+ public void updateAndWriteDependantRecords(OutputStream os, Map<RecordTypes, PositionDependentRecord> interestingRecords)
+ throws IOException {
// For position dependent records, hold where they were and now are
// As we go along, update, and hand over, to any Position Dependent
// records we happen across
- Map<Integer,Integer> oldToNewPositions = new HashMap<Integer,Integer>();
+ Map<Integer, Integer> oldToNewPositions = new HashMap<Integer, Integer>();
// First pass - figure out where all the position dependent
// records are going to end up, in the new scheme
CountingOS cos = new CountingOS();
for (Record record : _records) {
// all top level records are position dependent
- assert(record instanceof PositionDependentRecord);
- PositionDependentRecord pdr = (PositionDependentRecord)record;
+ assert (record instanceof PositionDependentRecord);
+ PositionDependentRecord pdr = (PositionDependentRecord) record;
int oldPos = pdr.getLastOnDiskOffset();
int newPos = cos.size();
pdr.setLastOnDiskOffset(newPos);
if (oldPos != UNSET_OFFSET) {
// new records don't need a mapping, as they aren't in a relation yet
- oldToNewPositions.put(oldPos,newPos);
+ oldToNewPositions.put(oldPos, newPos);
}
// Grab interesting records as they come past
// this will only save the very last record of each type
RecordTypes saveme = null;
- int recordType = (int)record.getRecordType();
+ int recordType = (int) record.getRecordType();
if (recordType == RecordTypes.PersistPtrIncrementalBlock.typeID) {
saveme = RecordTypes.PersistPtrIncrementalBlock;
- ptr = (PersistPtrHolder)pdr;
+ ptr = (PersistPtrHolder) pdr;
} else if (recordType == RecordTypes.UserEditAtom.typeID) {
saveme = RecordTypes.UserEditAtom;
- usr = (UserEditAtom)pdr;
+ usr = (UserEditAtom) pdr;
}
if (interestingRecords != null && saveme != null) {
- interestingRecords.put(saveme,pdr);
+ interestingRecords.put(saveme, pdr);
}
-
+
// Dummy write out, so the position winds on properly
record.writeOut(cos);
}
cos.close();
-
+
if (usr == null || ptr == null) {
throw new HSLFException("UserEditAtom or PersistPtr can't be determined.");
}
-
- Map<Integer,Integer> persistIds = new HashMap<Integer,Integer>();
- for (Map.Entry<Integer,Integer> entry : ptr.getSlideLocationsLookup().entrySet()) {
+
+ Map<Integer, Integer> persistIds = new HashMap<Integer, Integer>();
+ for (Map.Entry<Integer, Integer> entry : ptr.getSlideLocationsLookup().entrySet()) {
persistIds.put(oldToNewPositions.get(entry.getValue()), entry.getKey());
}
-
+
HSLFSlideShowEncrypted encData = new HSLFSlideShowEncrypted(getDocumentEncryptionAtom());
-
- for (Record record : _records) {
- assert(record instanceof PositionDependentRecord);
+
+ for (Record record : _records) {
+ assert (record instanceof PositionDependentRecord);
// We've already figured out their new location, and
// told them that
// Tell them of the positions of the other records though
- PositionDependentRecord pdr = (PositionDependentRecord)record;
+ PositionDependentRecord pdr = (PositionDependentRecord) record;
Integer persistId = persistIds.get(pdr.getLastOnDiskOffset());
if (persistId == null) persistId = 0;
-
+
// For now, we're only handling PositionDependentRecord's that
// happen at the top level.
// In future, we'll need the handle them everywhere, but that's
// a bit trickier
pdr.updateOtherRecordReferences(oldToNewPositions);
-
+
// Whatever happens, write out that record tree
if (os != null) {
record.writeOut(encData.encryptRecord(os, persistId, record));
}
}
-
- encData.close();
+
+ encData.close();
// Update and write out the Current User atom
- int oldLastUserEditAtomPos = (int)currentUser.getCurrentEditOffset();
+ int oldLastUserEditAtomPos = (int) currentUser.getCurrentEditOffset();
Integer newLastUserEditAtomPos = oldToNewPositions.get(oldLastUserEditAtomPos);
if (newLastUserEditAtomPos == null || usr.getLastOnDiskOffset() != newLastUserEditAtomPos) {
throw new HSLFException("Couldn't find the new location of the last UserEditAtom that used to be at " + oldLastUserEditAtomPos);
/**
* Writes out the slideshow to the currently open file.
- *
+ * <p>
* <p>This will fail (with an {@link IllegalStateException} if the
- * slideshow was opened read-only, opened from an {@link InputStream}
- * instead of a File, or if this is not the root document. For those cases,
- * you must use {@link #write(OutputStream)} or {@link #write(File)} to
- * write to a brand new document.
- *
- * @since POI 3.15 beta 3
- *
- * @throws IOException thrown on errors writing to the file
+ * slideshow was opened read-only, opened from an {@link InputStream}
+ * instead of a File, or if this is not the root document. For those cases,
+ * you must use {@link #write(OutputStream)} or {@link #write(File)} to
+ * write to a brand new document.
+ *
+ * @throws IOException thrown on errors writing to the file
* @throws IllegalStateException if this isn't from a writable File
+ * @since POI 3.15 beta 3
*/
@Override
public void write() throws IOException {
validateInPlaceWritePossible();
-
+
// Write the PowerPoint streams to the current FileSystem
// No need to do anything to other streams, already there!
write(directory.getFileSystem(), false);
-
+
// Sync with the File on disk
directory.getFileSystem().writeFilesystem();
}
-
+
/**
* Writes out the slideshow file the is represented by an instance
- * of this class.
+ * of this class.
* <p>This will write out only the common OLE2 streams. If you require all
- * streams to be written out, use {@link #write(File, boolean)}
- * with <code>preserveNodes</code> set to <code>true</code>.
+ * streams to be written out, use {@link #write(File, boolean)}
+ * with <code>preserveNodes</code> set to <code>true</code>.
+ *
* @param newFile The File to write to.
* @throws IOException If there is an unexpected IOException from writing to the File
*/
// Write out, but only the common streams
write(newFile, false);
}
+
/**
* Writes out the slideshow file the is represented by an instance
- * of this class.
+ * of this class.
* If you require all streams to be written out (eg Marcos, embeded
- * documents), then set <code>preserveNodes</code> set to <code>true</code>
- * @param newFile The File to write to.
+ * documents), then set <code>preserveNodes</code> set to <code>true</code>
+ *
+ * @param newFile The File to write to.
* @param preserveNodes Should all OLE2 streams be written back out, or only the common ones?
* @throws IOException If there is an unexpected IOException from writing to the File
*/
public void write(File newFile, boolean preserveNodes) throws IOException {
// Get a new FileSystem to write into
POIFSFileSystem outFS = POIFSFileSystem.create(newFile);
-
+
try {
// Write into the new FileSystem
write(outFS, preserveNodes);
/**
* Writes out the slideshow file the is represented by an instance
- * of this class.
+ * of this class.
* <p>This will write out only the common OLE2 streams. If you require all
- * streams to be written out, use {@link #write(OutputStream, boolean)}
- * with <code>preserveNodes</code> set to <code>true</code>.
+ * streams to be written out, use {@link #write(OutputStream, boolean)}
+ * with <code>preserveNodes</code> set to <code>true</code>.
+ *
* @param out The OutputStream to write to.
* @throws IOException If there is an unexpected IOException from
- * the passed in OutputStream
+ * the passed in OutputStream
*/
- @Override
+ @Override
public void write(OutputStream out) throws IOException {
// Write out, but only the common streams
write(out, false);
}
+
/**
* Writes out the slideshow file the is represented by an instance
- * of this class.
+ * of this class.
* If you require all streams to be written out (eg Marcos, embeded
- * documents), then set <code>preserveNodes</code> set to <code>true</code>
- * @param out The OutputStream to write to.
+ * documents), then set <code>preserveNodes</code> set to <code>true</code>
+ *
+ * @param out The OutputStream to write to.
* @param preserveNodes Should all OLE2 streams be written back out, or only the common ones?
* @throws IOException If there is an unexpected IOException from
- * the passed in OutputStream
+ * the passed in OutputStream
*/
public void write(OutputStream out, boolean preserveNodes) throws IOException {
// Get a new FileSystem to write into
POIFSFileSystem outFS = new POIFSFileSystem();
-
+
try {
// Write into the new FileSystem
write(outFS, preserveNodes);
outFS.close();
}
}
+
private void write(NPOIFSFileSystem outFS, boolean copyAllOtherNodes) throws IOException {
// read properties and pictures, with old encryption settings where appropriate
if (_pictures == null) {
- readPictures();
+ readPictures();
}
getDocumentSummaryInformation();
// Write out the Property Streams
writeProperties(outFS, writtenEntries);
-
+
BufAccessBAOS baos = new BufAccessBAOS();
// For position dependent records, hold where they were and now are
// Write the PPT stream into the POIFS layer
ByteArrayInputStream bais = new ByteArrayInputStream(_docstream);
- outFS.createOrUpdateDocument(bais,"PowerPoint Document");
+ outFS.createOrUpdateDocument(bais, "PowerPoint Document");
writtenEntries.add("PowerPoint Document");
-
+
currentUser.setEncrypted(encryptedSS.getDocumentEncryptionAtom() != null);
currentUser.writeToFS(outFS);
writtenEntries.add("Current User");
encryptedSS.encryptPicture(pict.getBuf(), offset);
}
outFS.createOrUpdateDocument(
- new ByteArrayInputStream(pict.getBuf(), 0, pict.size()), "Pictures"
+ new ByteArrayInputStream(pict.getBuf(), 0, pict.size()), "Pictures"
);
writtenEntries.add("Pictures");
pict.close();
}
}
- /**
+ /**
* For a given named property entry, either return it or null if
- * if it wasn't found
- *
- * @param setName The property to read
- * @return The value of the given property or null if it wasn't found.
+ * if it wasn't found
+ *
+ * @param setName The property to read
+ * @return The value of the given property or null if it wasn't found.
*/
protected PropertySet getPropertySet(String setName) {
DocumentEncryptionAtom dea = getDocumentEncryptionAtom();
return (dea == null)
- ? super.getPropertySet(setName)
- : super.getPropertySet(setName, dea.getEncryptionInfo());
+ ? super.getPropertySet(setName)
+ : super.getPropertySet(setName, dea.getEncryptionInfo());
}
/**
* Writes out the standard Documment Information Properties (HPSF)
- * @param outFS the POIFSFileSystem to write the properties into
+ *
+ * @param outFS the POIFSFileSystem to write the properties into
* @param writtenEntries a list of POIFS entries to add the property names too
- *
- * @throws IOException if an error when writing to the
- * {@link POIFSFileSystem} occurs
+ * @throws IOException if an error when writing to the
+ * {@link POIFSFileSystem} occurs
*/
protected void writeProperties(NPOIFSFileSystem outFS, List<String> writtenEntries) throws IOException {
super.writeProperties(outFS, writtenEntries);
DocumentEncryptionAtom dea = getDocumentEncryptionAtom();
if (dea != null) {
- CryptoAPIEncryptor enc = (CryptoAPIEncryptor)dea.getEncryptionInfo().getEncryptor();
+ CryptoAPIEncryptor enc = (CryptoAPIEncryptor) dea.getEncryptionInfo().getEncryptor();
try {
enc.getSummaryEntries(outFS.getRoot()); // ignore OutputStream
} catch (IOException e) {
/* ******************* adding methods follow ********************* */
- /**
- * Adds a new root level record, at the end, but before the last
- * PersistPtrIncrementalBlock.
- */
- public synchronized int appendRootLevelRecord(Record newRecord) {
- int addedAt = -1;
- Record[] r = new Record[_records.length+1];
- boolean added = false;
- for(int i=(_records.length-1); i>=0; i--) {
- if(added) {
- // Just copy over
- r[i] = _records[i];
- } else {
- r[(i+1)] = _records[i];
- if(_records[i] instanceof PersistPtrHolder) {
- r[i] = newRecord;
- added = true;
- addedAt = i;
- }
- }
- }
- _records = r;
- return addedAt;
- }
-
- /**
- * Add a new picture to this presentation.
+ /**
+ * Adds a new root level record, at the end, but before the last
+ * PersistPtrIncrementalBlock.
+ */
+ public synchronized int appendRootLevelRecord(Record newRecord) {
+ int addedAt = -1;
+ Record[] r = new Record[_records.length + 1];
+ boolean added = false;
+ for (int i = (_records.length - 1); i >= 0; i--) {
+ if (added) {
+ // Just copy over
+ r[i] = _records[i];
+ } else {
+ r[(i + 1)] = _records[i];
+ if (_records[i] instanceof PersistPtrHolder) {
+ r[i] = newRecord;
+ added = true;
+ addedAt = i;
+ }
+ }
+ }
+ _records = r;
+ return addedAt;
+ }
+
+ /**
+ * Add a new picture to this presentation.
*
* @return offset of this picture in the Pictures stream
- */
- public int addPicture(HSLFPictureData img) {
- // Process any existing pictures if we haven't yet
- if(_pictures == null) {
- try {
- readPictures();
- } catch(IOException e) {
- throw new CorruptPowerPointFileException(e.getMessage());
- }
- }
-
- // Add the new picture in
- int offset = 0;
- if(_pictures.size() > 0) {
- HSLFPictureData prev = _pictures.get(_pictures.size() - 1);
- offset = prev.getOffset() + prev.getRawData().length + 8;
- }
- img.setOffset(offset);
- img.setIndex(_pictures.size()+1);
- _pictures.add(img);
- return offset;
- }
+ */
+ public int addPicture(HSLFPictureData img) {
+ // Process any existing pictures if we haven't yet
+ if (_pictures == null) {
+ try {
+ readPictures();
+ } catch (IOException e) {
+ throw new CorruptPowerPointFileException(e.getMessage());
+ }
+ }
+
+ // Add the new picture in
+ int offset = 0;
+ if (_pictures.size() > 0) {
+ HSLFPictureData prev = _pictures.get(_pictures.size() - 1);
+ offset = prev.getOffset() + prev.getRawData().length + 8;
+ }
+ img.setOffset(offset);
+ img.setIndex(_pictures.size() + 1);
+ _pictures.add(img);
+ return offset;
+ }
/* ******************* fetching methods follow ********************* */
- /**
- * Returns an array of all the records found in the slideshow
- */
- public Record[] getRecords() { return _records; }
-
- /**
- * Returns an array of the bytes of the file. Only correct after a
- * call to open or write - at all other times might be wrong!
- */
- public byte[] getUnderlyingBytes() { return _docstream; }
-
- /**
- * Fetch the Current User Atom of the document
- */
- public CurrentUserAtom getCurrentUserAtom() { return currentUser; }
-
- /**
- * Return list of pictures contained in this presentation
- *
- * @return list with the read pictures or an empty list if the
- * presentation doesn't contain pictures.
- */
- public List<HSLFPictureData> getPictureData() {
- if(_pictures == null) {
- try {
- readPictures();
- } catch(IOException e) {
- throw new CorruptPowerPointFileException(e.getMessage());
- }
- }
-
- return Collections.unmodifiableList(_pictures);
- }
+ /**
+ * Returns an array of all the records found in the slideshow
+ */
+ public Record[] getRecords() {
+ return _records;
+ }
+
+ /**
+ * Returns an array of the bytes of the file. Only correct after a
+ * call to open or write - at all other times might be wrong!
+ */
+ public byte[] getUnderlyingBytes() {
+ return _docstream;
+ }
+
+ /**
+ * Fetch the Current User Atom of the document
+ */
+ public CurrentUserAtom getCurrentUserAtom() {
+ return currentUser;
+ }
+
+ /**
+ * Return list of pictures contained in this presentation
+ *
+ * @return list with the read pictures or an empty list if the
+ * presentation doesn't contain pictures.
+ */
+ public List<HSLFPictureData> getPictureData() {
+ if (_pictures == null) {
+ try {
+ readPictures();
+ } catch (IOException e) {
+ throw new CorruptPowerPointFileException(e.getMessage());
+ }
+ }
+
+ return Collections.unmodifiableList(_pictures);
+ }
/**
* Gets embedded object data from the slide show.
List<HSLFObjectData> objects = new ArrayList<HSLFObjectData>();
for (Record r : _records) {
if (r instanceof ExOleObjStg) {
- objects.add(new HSLFObjectData((ExOleObjStg)r));
+ objects.add(new HSLFObjectData((ExOleObjStg) r));
}
}
_objects = objects.toArray(new HSLFObjectData[objects.size()]);
}
return _objects;
}
-
+
@Override
public void close() throws IOException {
NPOIFSFileSystem fs = directory.getFileSystem();
fs.close();
}
}
-
-
+
+
private static class BufAccessBAOS extends ByteArrayOutputStream {
public byte[] getBuf() {
return buf;
}
}
-
+
private static class CountingOS extends OutputStream {
int count = 0;
+
public void write(int b) throws IOException {
count++;
}
public void write(byte[] b, int off, int len) throws IOException {
count += len;
}
-
+
public int size() {
return count;
}