Partial NPOIFS write implementation, and tidy up of methods around this
author Nick Burch <nick@apache.org>
Tue, 28 Dec 2010 05:31:32 +0000 (05:31 +0000)
committer Nick Burch <nick@apache.org>
Tue, 28 Dec 2010 05:31:32 +0000 (05:31 +0000)
git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1053261 13f79535-47bb-0310-9956-ffa450edef68

src/java/org/apache/poi/poifs/filesystem/NPOIFSFileSystem.java
src/java/org/apache/poi/poifs/filesystem/NPOIFSMiniStore.java
src/java/org/apache/poi/poifs/nio/ByteArrayBackedDataSource.java
src/java/org/apache/poi/poifs/nio/DataSource.java
src/java/org/apache/poi/poifs/nio/FileBackedDataSource.java
src/java/org/apache/poi/poifs/storage/BATBlock.java
src/java/org/apache/poi/poifs/storage/BlockAllocationTableWriter.java
src/java/org/apache/poi/poifs/storage/HeaderBlockWriter.java

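For orientation, here is a minimal usage sketch of the new in-place write path added in this commit. It assumes the existing File-based NPOIFSFileSystem constructor; the file name is illustrative and nothing below is part of the diff itself.

import java.io.File;

import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

public class InPlaceWriteSketch {
    public static void main(String[] args) throws Exception {
        // Opening from a File gives a FileBackedDataSource underneath
        NPOIFSFileSystem fs = new NPOIFSFileSystem(new File("test.ole2"));

        // ... read or modify entries here ...

        // New in this commit: push the in-memory state back into the open file.
        // Had the filesystem been opened from an InputStream instead,
        // writeFilesystem() would throw IllegalArgumentException and the
        // writeFilesystem(OutputStream) variant would have to be used.
        fs.writeFilesystem();
    }
}
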
src/java/org/apache/poi/poifs/filesystem/NPOIFSFileSystem.java
index 8147e149c314e7f68dabca60c5a3e982b04aab8d..af1c0127e4ebe2c8a244d59ce2b8a9a0f7e5d8f9 100644 (file)
@@ -19,6 +19,7 @@
 
 package org.apache.poi.poifs.filesystem;
 
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
@@ -87,8 +88,6 @@ public class NPOIFSFileSystem extends BlockStore
     
     private DataSource _data;
     
-    private List          _documents; // TODO - probably remove this shortly
-
     /**
      * What big block size the file uses. Most files
      *  use 512 bytes, but a few use 4096
@@ -519,6 +518,26 @@ public class NPOIFSFileSystem extends BlockStore
     {
         return getRoot().createDirectory(name);
     }
+    
+    /**
+     * Write the filesystem out to the open file. Will throw an
+     *  {@link IllegalArgumentException} if opened from an 
+     *  {@link InputStream}.
+     * 
+     * @exception IOException thrown on errors writing to the stream
+     */
+    public void writeFilesystem() throws IOException
+    {
+       if(_data instanceof FileBackedDataSource) {
+          // Good, correct type
+       } else {
+          throw new IllegalArgumentException(
+                "POIFS opened from an inputstream, so writeFilesystem() may " +
+                "not be called. Use writeFilesystem(OutputStream) instead"
+          );
+       }
+       syncWithDataSource();
+    }
 
     /**
      * Write the filesystem out
@@ -532,94 +551,36 @@ public class NPOIFSFileSystem extends BlockStore
     public void writeFilesystem(final OutputStream stream)
         throws IOException
     {
-        // create the small block store, and the SBAT
-        SmallBlockTableWriter      sbtw       =
-            new SmallBlockTableWriter(bigBlockSize, _documents, _property_table.getRoot());
-
-        // create the block allocation table
-        BlockAllocationTableWriter bat        =
-            new BlockAllocationTableWriter(bigBlockSize);
-
-        // create a list of BATManaged objects: the documents plus the
-        // property table and the small block table
-        List bm_objects = new ArrayList();
-
-        bm_objects.addAll(_documents);
-        bm_objects.add(_property_table);
-        bm_objects.add(sbtw);
-        bm_objects.add(sbtw.getSBAT());
-
-        // walk the list, allocating space for each and assigning each
-        // a starting block number
-        Iterator iter = bm_objects.iterator();
-
-        while (iter.hasNext())
-        {
-            BATManaged bmo         = ( BATManaged ) iter.next();
-            int        block_count = bmo.countBlocks();
-
-            if (block_count != 0)
-            {
-                bmo.setStartBlock(bat.allocateSpace(block_count));
-            }
-            else
-            {
-
-                // Either the BATManaged object is empty or its data
-                // is composed of SmallBlocks; in either case,
-                // allocating space in the BAT is inappropriate
-            }
-        }
-
-        // allocate space for the block allocation table and take its
-        // starting block
-        int               batStartBlock       = bat.createBlocks();
-
-        // get the extended block allocation table blocks
-        HeaderBlockWriter header_block_writer = new HeaderBlockWriter(bigBlockSize);
-        BATBlock[]        xbat_blocks         =
-            header_block_writer.setBATBlocks(bat.countBlocks(),
-                                             batStartBlock);
-
-        // set the property table start block
-        header_block_writer.setPropertyStart(_property_table.getStartBlock());
-
-        // set the small block allocation table start block
-        header_block_writer.setSBATStart(sbtw.getSBAT().getStartBlock());
-
-        // set the small block allocation table block count
-        header_block_writer.setSBATBlockCount(sbtw.getSBATBlockCount());
-
-        // the header is now properly initialized. Make a list of
-        // writers (the header block, followed by the documents, the
-        // property table, the small block store, the small block
-        // allocation table, the block allocation table, and the
-        // extended block allocation table blocks)
-        List writers = new ArrayList();
-
-        writers.add(header_block_writer);
-        writers.addAll(_documents);
-        writers.add(sbtw);
-        writers.add(sbtw.getSBAT());
-        writers.add(bat);
-        for (int j = 0; j < xbat_blocks.length; j++)
-        {
-            writers.add(xbat_blocks[ j ]);
-        }
-
-        // now, write everything out
-        iter = writers.iterator();
-        while (iter.hasNext())
-        {
-            BlockWritable writer = ( BlockWritable ) iter.next();
-
-            writer.writeBlocks(stream);
-        }
-        
-        // Finally have the property table serialise itself
-        _property_table.write(
-              new NPOIFSStream(this, _header.getPropertyStart())
-        );
+       // Have the datasource updated
+       syncWithDataSource();
+       
+       // Now copy the contents to the stream
+       _data.copyTo(stream);
+    }
+    
+    /**
+     * Have our in-memory objects write their state
+     *  to their backing blocks 
+     */
+    private void syncWithDataSource() throws IOException
+    {
+       // HeaderBlock
+       HeaderBlockWriter hbw = new HeaderBlockWriter(_header);
+       hbw.writeBlock( getBlockAt(0) );
+       
+       // BATs
+       for(BATBlock bat : _bat_blocks) {
+          ByteBuffer block = getBlockAt(bat.getOurBlockIndex());
+          BlockAllocationTableWriter.writeBlock(bat, block);
+       }
+       
+       // SBATs
+       _mini_store.syncWithDataSource();
+       
+       // Properties
+       _property_table.write(
+             new NPOIFSStream(this, _header.getPropertyStart())
+       );
     }
 
     /**
@@ -682,29 +643,6 @@ public class NPOIFSFileSystem extends BlockStore
        return getRoot().createDocumentInputStream(documentName);
     }
 
-    /**
-     * add a new POIFSDocument
-     *
-     * @param document the POIFSDocument being added
-     */
-
-    void addDocument(final POIFSDocument document)
-    {
-        _documents.add(document);
-        _property_table.addProperty(document.getDocumentProperty());
-    }
-
-    /**
-     * add a new DirectoryProperty
-     *
-     * @param directory the DirectoryProperty being added
-     */
-
-    void addDirectory(final DirectoryProperty directory)
-    {
-        _property_table.addProperty(directory);
-    }
-
     /**
      * remove an entry
      *
@@ -714,62 +652,6 @@ public class NPOIFSFileSystem extends BlockStore
     void remove(EntryNode entry)
     {
         _property_table.removeProperty(entry.getProperty());
-        if (entry.isDocumentEntry())
-        {
-            _documents.remove((( DocumentNode ) entry).getDocument());
-        }
-    }
-
-    private void processProperties(final BlockList small_blocks,
-                                   final BlockList big_blocks,
-                                   final Iterator properties,
-                                   final DirectoryNode dir,
-                                   final int headerPropertiesStartAt)
-        throws IOException
-    {
-        while (properties.hasNext())
-        {
-            Property      property = ( Property ) properties.next();
-            String        name     = property.getName();
-            DirectoryNode parent   = (dir == null)
-                                     ? (( DirectoryNode ) getRoot())
-                                     : dir;
-
-            if (property.isDirectory())
-            {
-                DirectoryNode new_dir =
-                    ( DirectoryNode ) parent.createDirectory(name);
-
-                new_dir.setStorageClsid( property.getStorageClsid() );
-
-                processProperties(
-                    small_blocks, big_blocks,
-                    (( DirectoryProperty ) property).getChildren(),
-                    new_dir, headerPropertiesStartAt);
-            }
-            else
-            {
-                int           startBlock = property.getStartBlock();
-                int           size       = property.getSize();
-                POIFSDocument document   = null;
-
-                if (property.shouldUseSmallBlocks())
-                {
-                    document =
-                        new POIFSDocument(name,
-                                          small_blocks.fetchBlocks(startBlock, headerPropertiesStartAt),
-                                          size);
-                }
-                else
-                {
-                    document =
-                        new POIFSDocument(name,
-                                          big_blocks.fetchBlocks(startBlock, headerPropertiesStartAt),
-                                          size);
-                }
-                parent.createDocument(document);
-            }
-        }
     }
     
     /* ********** START begin implementation of POIFSViewable ********** */
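
The stream-based variant now funnels through the same sync step. A matching sketch, again with illustrative file names and the assumed File-based constructor:

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;

import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

public class StreamWriteSketch {
    public static void main(String[] args) throws Exception {
        NPOIFSFileSystem fs = new NPOIFSFileSystem(new File("source.ole2"));

        // writeFilesystem(OutputStream) first syncs the header, BATs, SBATs and
        // property table into the backing DataSource, then copies the whole
        // DataSource to the supplied stream via DataSource.copyTo(stream)
        OutputStream out = new FileOutputStream("copy.ole2");
        try {
            fs.writeFilesystem(out);
        } finally {
            out.close();
        }
    }
}
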
src/java/org/apache/poi/poifs/filesystem/NPOIFSMiniStore.java
index 9894941680773124a6dfce21b6c667acfe1969e4..7323045e5b9bf0997abe7c673e669381bbd5a2f1 100644 (file)
@@ -27,6 +27,7 @@ import java.util.List;
 import org.apache.poi.poifs.common.POIFSConstants;
 import org.apache.poi.poifs.property.RootProperty;
 import org.apache.poi.poifs.storage.BATBlock;
+import org.apache.poi.poifs.storage.BlockAllocationTableWriter;
 import org.apache.poi.poifs.storage.HeaderBlock;
 import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex;
 
@@ -194,5 +195,14 @@ public class NPOIFSMiniStore extends BlockStore
     protected int getBlockStoreBlockSize() {
        return POIFSConstants.SMALL_BLOCK_SIZE;
     }
+    
+    /**
+     * Writes the SBATs to their backing blocks
+     */
+    protected void syncWithDataSource() throws IOException {
+       for(BATBlock sbat : _sbat_blocks) {
+          ByteBuffer block = _filesystem.getBlockAt(sbat.getOurBlockIndex());
+          BlockAllocationTableWriter.writeBlock(sbat, block);
+       }
+    }
 }
-
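
Note that the SBAT blocks live in the main filesystem's big-block stream, which is why the mini store writes them back via _filesystem.getBlockAt(sbat.getOurBlockIndex()) rather than through the mini block chain.
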
src/java/org/apache/poi/poifs/nio/ByteArrayBackedDataSource.java
index 1df2b1b064d673e44b086686e19f57ae2130fdc1..24460a2e3f20c7cc0a18aff691d9de5e48239f06 100644 (file)
@@ -17,6 +17,8 @@
 
 package org.apache.poi.poifs.nio;
 
+import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
 /**
@@ -77,6 +79,10 @@ public class ByteArrayBackedDataSource extends DataSource {
       buffer = nb;
    }
    
+   public void copyTo(OutputStream stream) throws IOException {
+      stream.write(buffer, 0, (int)size);
+   }
+   
    public long size() {
       return size;
    }
src/java/org/apache/poi/poifs/nio/DataSource.java
index 4d56525fd27be45f00167ea4f49a399c9caacc71..f43667626f6b319d975b27b943e1c2c9e5fd44d9 100644 (file)
@@ -18,6 +18,7 @@
 package org.apache.poi.poifs.nio;
 
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
 /**
@@ -27,5 +28,8 @@ public abstract class DataSource {
    public abstract ByteBuffer read(int length, long position) throws IOException;
    public abstract void write(ByteBuffer src, long position) throws IOException;
    public abstract long size() throws IOException;
+   /** Close the underlying stream */
    public abstract void close() throws IOException;
+   /** Copies the contents to the specified OutputStream */
+   public abstract void copyTo(OutputStream stream) throws IOException;
 }
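
To show the DataSource contract that copyTo(OutputStream) joins, a small sketch against the in-memory implementation. It assumes the single-argument ByteArrayBackedDataSource(byte[]) constructor, which is not shown in this diff:

import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;

import org.apache.poi.poifs.nio.ByteArrayBackedDataSource;
import org.apache.poi.poifs.nio.DataSource;

public class DataSourceSketch {
    public static void main(String[] args) throws Exception {
        // Assumed constructor: wraps the whole array as the source's contents
        DataSource data = new ByteArrayBackedDataSource(new byte[1024]);

        // Positional read of one 512 byte block starting at offset 0
        ByteBuffer block = data.read(512, 0);
        System.out.println("Read " + block.remaining() + " bytes");

        // The new copyTo(OutputStream) streams the full contents out,
        // regardless of whether the backing store is a byte array or a file
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        data.copyTo(out);
        System.out.println("Copied " + out.size() + " bytes");
    }
}
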
src/java/org/apache/poi/poifs/nio/FileBackedDataSource.java
index bba58efeaacb7f84bfe4fafcc9bb9799aff33fed..ed2f7ce1101a501bf0a8c247d0bf8ab62da2ff71 100644 (file)
@@ -20,9 +20,12 @@ package org.apache.poi.poifs.nio;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
 import java.nio.channels.FileChannel;
+import java.nio.channels.WritableByteChannel;
 
 import org.apache.poi.util.IOUtils;
 
@@ -68,6 +71,13 @@ public class FileBackedDataSource extends DataSource {
       channel.write(src, position);
    }
    
+   public void copyTo(OutputStream stream) throws IOException {
+      // Wrap the OutputStream as a channel
+      WritableByteChannel out = Channels.newChannel(stream);
+      // Now do the transfer
+      channel.transferTo(0, channel.size(), out);
+   }
+   
    public long size() throws IOException {
       return channel.size();
    }
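
Design note: wrapping the OutputStream with Channels.newChannel and handing it to FileChannel.transferTo copies the whole file channel-to-channel, without a manual read/write loop over intermediate byte arrays. Strictly, transferTo may transfer fewer bytes than requested, so a fully robust version would loop until channel.size() bytes have gone out.
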
src/java/org/apache/poi/poifs/storage/BATBlock.java
index a48b43aa2c37674749d39daa322bb5ec4bef5cbd..6f85c84c7193f136688473288668bf2146a102e2 100644 (file)
@@ -354,6 +354,18 @@ public final class BATBlock extends BigBlock {
     void writeData(final OutputStream stream)
         throws IOException
     {
+       // Save it out
+       stream.write( serialize() );
+    }
+    
+    void writeData(final ByteBuffer block)
+        throws IOException
+    {
+       // Save it out
+       block.put( serialize() );
+    }
+    
+    private byte[] serialize() {
        // Create the empty array
        byte[] data = new byte[ bigBlockSize.getBigBlockSize() ];
        
@@ -364,8 +376,8 @@ public final class BATBlock extends BigBlock {
           offset += LittleEndian.INT_SIZE;
        }
        
-       // Save it out
-       stream.write(data);
+       // Done
+       return data;
     }
 
     /* **********  END  extension of BigBlock ********** */
src/java/org/apache/poi/poifs/storage/BlockAllocationTableWriter.java
index 12a88c34d39e908dad73a61bd40c19337047736e..e037b892d6b390aa48cd38aa59397fe2ff4e7a25 100644 (file)
@@ -19,6 +19,7 @@ package org.apache.poi.poifs.storage;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
 
 import org.apache.poi.poifs.common.POIFSBigBlockSize;
 import org.apache.poi.poifs.common.POIFSConstants;
@@ -155,6 +156,15 @@ public final class BlockAllocationTableWriter implements BlockWritable, BATManag
             _blocks[ j ].writeBlocks(stream);
         }
     }
+    
+    /**
+     * Write the BAT into its associated block
+     */
+    public static void writeBlock(final BATBlock bat, final ByteBuffer block) 
+        throws IOException
+    {
+        bat.writeData(block);
+    }
 
     /**
      * Return the number of BigBlock's this instance uses
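
BATBlock.writeData(ByteBuffer) above is package-private, so this public static writeBlock(BATBlock, ByteBuffer) helper is what allows NPOIFSFileSystem and NPOIFSMiniStore, over in the filesystem package, to write their BAT and SBAT blocks back during syncWithDataSource().
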
src/java/org/apache/poi/poifs/storage/HeaderBlockWriter.java
index ed9bdab3355f6fecbb8afe500d31900d711ab4e0..531c2e832fb922567c96689d3ca55f6ee881f2ee 100644 (file)
 
 package org.apache.poi.poifs.storage;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
 
 import org.apache.poi.poifs.common.POIFSBigBlockSize;
 import org.apache.poi.poifs.common.POIFSConstants;
@@ -37,12 +39,20 @@ public class HeaderBlockWriter implements HeaderBlockConstants, BlockWritable
     /**
      * Create a single instance initialized with default values
      */
-
     public HeaderBlockWriter(POIFSBigBlockSize bigBlockSize)
     {
        _header_block = new HeaderBlock(bigBlockSize);
     }
 
+    /**
+     * Create a single instance initialized from an
+     *  existing HeaderBlock
+     */
+    public HeaderBlockWriter(HeaderBlock headerBlock)
+    {
+       _header_block = headerBlock;
+    }
+
     /**
      * Set BAT block parameters. Assumes that all BAT blocks are
      * contiguous. Will construct XBAT blocks if necessary and return
@@ -155,12 +165,30 @@ public class HeaderBlockWriter implements HeaderBlockConstants, BlockWritable
      * @exception IOException on problems writing to the specified
      *            stream
      */
-
     public void writeBlocks(final OutputStream stream)
         throws IOException
     {
         _header_block.writeData(stream);
     }
+    
+    /**
+     * Write the block's data to an existing block
+     *
+     * @param block the ByteBuffer of the block to which the 
+     *               stored data should be written
+     *
+     * @exception IOException on problems writing to the block
+     */
+    public void writeBlock(ByteBuffer block)
+        throws IOException
+    {
+       ByteArrayOutputStream baos = new ByteArrayOutputStream(
+             _header_block.getBigBlockSize().getBigBlockSize()
+       );
+       _header_block.writeData(baos);
+       
+       block.put(baos.toByteArray());
+    }
 
     /* **********  END  extension of BigBlock ********** */
 }   // end public class HeaderBlockWriter
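
The new writeBlock(ByteBuffer) keeps the header serialisation logic in HeaderBlock itself: it renders the header through the existing stream-based HeaderBlock.writeData into a ByteArrayOutputStream sized to one big block, then puts the resulting bytes into the caller's buffer.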