aboutsummaryrefslogtreecommitdiffstats
path: root/src/java/org
diff options
context:
space:
mode:
authorJosh Micich <josh@apache.org>2008-11-06 02:38:06 +0000
committerJosh Micich <josh@apache.org>2008-11-06 02:38:06 +0000
commit2963774c3752202c0686b0223d7dfd8ae1973ca8 (patch)
tree305535395789ea61f556b412ab5288a7886e06dd /src/java/org
parent75e352f2a073bdda52a2e3c30df63be6c71ff61d (diff)
downloadpoi-2963774c3752202c0686b0223d7dfd8ae1973ca8.tar.gz
poi-2963774c3752202c0686b0223d7dfd8ae1973ca8.zip
Merged revisions 709570,709598,710114,710134,710136,711505,711513-711515,711694,711739,711741,711746,711749 via svnmerge from
https://svn.apache.org/repos/asf/poi/trunk ........ r709570 | josh | 2008-10-31 14:17:08 -0700 (Fri, 31 Oct 2008) | 1 line made DrawingSelectionRecord into a plain BIFF record (not an escher holder aggregate). Added some interpretation of fields ........ r709598 | josh | 2008-10-31 16:24:41 -0700 (Fri, 31 Oct 2008) | 1 line Simplified multiple record expansion logic ........ r710114 | yegor | 2008-11-03 09:54:01 -0800 (Mon, 03 Nov 2008) | 1 line fixed #46122: Picture#getEscherBSERecord threw NullPointerException if EscherContainerRecord.BSTORE_CONTAINER was not found ........ r710134 | yegor | 2008-11-03 11:19:39 -0800 (Mon, 03 Nov 2008) | 1 line fixed bug #46033: table cells had incorrect text type resulting in corrupted style info ........ r710136 | yegor | 2008-11-03 11:23:52 -0800 (Mon, 03 Nov 2008) | 1 line updated status of the fixed bug #46033 ........ r711505 | josh | 2008-11-04 19:50:31 -0800 (Tue, 04 Nov 2008) | 1 line Refactored test case ........ r711513 | josh | 2008-11-04 21:45:17 -0800 (Tue, 04 Nov 2008) | 1 line Converted ConstantValueParser to use plain Strings instead of UnicodeStrings ........ r711514 | josh | 2008-11-04 21:52:35 -0800 (Tue, 04 Nov 2008) | 1 line Converted SupBookRecord to use plain Strings instead of UnicodeStrings ........ r711515 | josh | 2008-11-04 22:15:59 -0800 (Tue, 04 Nov 2008) | 1 line Refactored test case ........ r711694 | josh | 2008-11-05 12:46:00 -0800 (Wed, 05 Nov 2008) | 1 line Fixed bug in conversion to/from text cells ........ r711739 | josh | 2008-11-05 15:28:55 -0800 (Wed, 05 Nov 2008) | 1 line Refactoring test case ........ r711741 | josh | 2008-11-05 15:35:02 -0800 (Wed, 05 Nov 2008) | 1 line Refactoring test case ........ r711746 | josh | 2008-11-05 15:45:42 -0800 (Wed, 05 Nov 2008) | 1 line Fixed mistake in test case. Constant value was 4 bytes too large (should be max *data* size not max *record* size). ........ 
r711749 | josh | 2008-11-05 17:12:41 -0800 (Wed, 05 Nov 2008) | 1 line Introduced ContinuableRecord to help fix serialization of StringRecords with large data. Fixed TextObjectRecord to only write 16bit unicode when needed. Simplification in UnicodeString. ........ git-svn-id: https://svn.apache.org/repos/asf/poi/branches/ooxml@711755 13f79535-47bb-0310-9956-ffa450edef68
Diffstat (limited to 'src/java/org')
-rwxr-xr-xsrc/java/org/apache/poi/hssf/model/LinkTable.java11
-rw-r--r--src/java/org/apache/poi/hssf/record/DrawingSelectionRecord.java146
-rw-r--r--src/java/org/apache/poi/hssf/record/RecordFactory.java141
-rwxr-xr-xsrc/java/org/apache/poi/hssf/record/RecordInputStream.java10
-rw-r--r--src/java/org/apache/poi/hssf/record/SSTRecord.java89
-rw-r--r--src/java/org/apache/poi/hssf/record/SSTRecordHeader.java76
-rw-r--r--src/java/org/apache/poi/hssf/record/SSTRecordSizeCalculator.java51
-rw-r--r--src/java/org/apache/poi/hssf/record/SSTSerializer.java61
-rw-r--r--src/java/org/apache/poi/hssf/record/StringRecord.java118
-rw-r--r--src/java/org/apache/poi/hssf/record/SupBookRecord.java100
-rw-r--r--src/java/org/apache/poi/hssf/record/TextObjectRecord.java128
-rw-r--r--src/java/org/apache/poi/hssf/record/UnicodeString.java521
-rwxr-xr-xsrc/java/org/apache/poi/hssf/record/constant/ConstantValueParser.java16
-rw-r--r--src/java/org/apache/poi/hssf/record/cont/ContinuableRecord.java69
-rw-r--r--src/java/org/apache/poi/hssf/record/cont/ContinuableRecordOutput.java257
-rw-r--r--src/java/org/apache/poi/hssf/record/cont/UnknownLengthRecordOutput.java114
-rw-r--r--src/java/org/apache/poi/hssf/record/formula/ArrayPtg.java4
-rw-r--r--src/java/org/apache/poi/hssf/usermodel/HSSFCell.java70
-rw-r--r--src/java/org/apache/poi/util/DelayableLittleEndianOutput.java34
-rw-r--r--src/java/org/apache/poi/util/LittleEndianByteArrayOutputStream.java8
-rw-r--r--src/java/org/apache/poi/util/StringUtil.java9
21 files changed, 988 insertions, 1045 deletions
diff --git a/src/java/org/apache/poi/hssf/model/LinkTable.java b/src/java/org/apache/poi/hssf/model/LinkTable.java
index 998712e5e8..f587f5e375 100755
--- a/src/java/org/apache/poi/hssf/model/LinkTable.java
+++ b/src/java/org/apache/poi/hssf/model/LinkTable.java
@@ -29,8 +29,9 @@ import org.apache.poi.hssf.record.ExternalNameRecord;
import org.apache.poi.hssf.record.NameRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.SupBookRecord;
-import org.apache.poi.hssf.record.UnicodeString;
+import org.apache.poi.hssf.record.formula.Area3DPtg;
import org.apache.poi.hssf.record.formula.NameXPtg;
+import org.apache.poi.hssf.record.formula.Ref3DPtg;
/**
* Link Table (OOO pdf reference: 4.10.3 ) <p/>
@@ -311,10 +312,10 @@ final class LinkTable {
return null;
}
int shIx = _externSheetRecord.getFirstSheetIndexFromRefIndex(extRefIndex);
- UnicodeString usSheetName = ebr.getSheetNames()[shIx];
+ String usSheetName = ebr.getSheetNames()[shIx];
return new String[] {
ebr.getURL(),
- usSheetName.getString(),
+ usSheetName,
};
}
@@ -345,9 +346,9 @@ final class LinkTable {
return result;
}
- private static int getSheetIndex(UnicodeString[] sheetNames, String sheetName) {
+ private static int getSheetIndex(String[] sheetNames, String sheetName) {
for (int i = 0; i < sheetNames.length; i++) {
- if (sheetNames[i].getString().equals(sheetName)) {
+ if (sheetNames[i].equals(sheetName)) {
return i;
}
diff --git a/src/java/org/apache/poi/hssf/record/DrawingSelectionRecord.java b/src/java/org/apache/poi/hssf/record/DrawingSelectionRecord.java
index 687c11314b..85997e9139 100644
--- a/src/java/org/apache/poi/hssf/record/DrawingSelectionRecord.java
+++ b/src/java/org/apache/poi/hssf/record/DrawingSelectionRecord.java
@@ -17,25 +17,129 @@
package org.apache.poi.hssf.record;
-public final class DrawingSelectionRecord extends AbstractEscherHolderRecord {
- public static final short sid = 0xED;
-
- public DrawingSelectionRecord()
- {
- }
-
- public DrawingSelectionRecord( RecordInputStream in )
- {
- super( in );
- }
-
- protected String getRecordName()
- {
- return "MSODRAWINGSELECTION";
- }
-
- public short getSid()
- {
- return sid;
- }
+import org.apache.poi.util.HexDump;
+import org.apache.poi.util.LittleEndianByteArrayOutputStream;
+import org.apache.poi.util.LittleEndianInput;
+import org.apache.poi.util.LittleEndianOutput;
+
+/**
+ * MsoDrawingSelection (0x00ED)<p/>
+ * Reference:
+ * [MS-OGRAPH].pdf sec 2.4.69
+ *
+ * @author Josh Micich
+ */
+public final class DrawingSelectionRecord extends Record {
+ public static final short sid = 0x00ED;
+
+ /**
+ * From [MS-ODRAW].pdf sec 2.2.1<br/>
+ * TODO - make EscherRecordHeader {@link LittleEndianInput} aware and refactor with this
+ */
+ private static final class OfficeArtRecordHeader {
+ public static final int ENCODED_SIZE = 8;
+ /**
+ * lower 4 bits is 'version' usually 0x01 or 0x0F (for containers)<br/>
+ * upper 12 bits is 'instance'
+ */
+ private final int _verAndInstance;
+ /** value should be between 0xF000 and 0xFFFF */
+ private final int _type;
+ private final int _length;
+
+ public OfficeArtRecordHeader(LittleEndianInput in) {
+ _verAndInstance = in.readUShort();
+ _type = in.readUShort();
+ _length = in.readInt();
+ }
+
+ public void serialize(LittleEndianOutput out) {
+ out.writeShort(_verAndInstance);
+ out.writeShort(_type);
+ out.writeInt(_length);
+ }
+
+ public String debugFormatAsString() {
+ StringBuffer sb = new StringBuffer(32);
+ sb.append("ver+inst=").append(HexDump.shortToHex(_verAndInstance));
+ sb.append(" type=").append(HexDump.shortToHex(_type));
+ sb.append(" len=").append(HexDump.intToHex(_length));
+ return sb.toString();
+ }
+ }
+
+ // [MS-OGRAPH].pdf says that the data of this record is an OfficeArtFDGSL structure
+ // as described in[MS-ODRAW].pdf sec 2.2.33
+ private OfficeArtRecordHeader _header;
+ private int _cpsp;
+ /** a MSODGSLK enum value for the current selection mode */
+ private int _dgslk;
+ private int _spidFocus;
+ /** selected shape IDs (e.g. from EscherSpRecord.ShapeId) */
+ private int[] _shapeIds;
+
+ public DrawingSelectionRecord(RecordInputStream in) {
+ _header = new OfficeArtRecordHeader(in);
+ _cpsp = in.readInt();
+ _dgslk = in.readInt();
+ _spidFocus = in.readInt();
+ int nShapes = in.available() / 4;
+ int[] shapeIds = new int[nShapes];
+ for (int i = 0; i < nShapes; i++) {
+ shapeIds[i] = in.readInt();
+ }
+ _shapeIds = shapeIds;
+ }
+
+ public short getSid() {
+ return sid;
+ }
+
+ protected int getDataSize() {
+ return OfficeArtRecordHeader.ENCODED_SIZE
+ + 12 // 3 int fields
+ + _shapeIds.length * 4;
+ }
+
+ public int serialize(int offset, byte[] data) {
+ int dataSize = getDataSize();
+ int recSize = 4 + dataSize;
+ LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recSize);
+ out.writeShort(sid);
+ out.writeShort(dataSize);
+ _header.serialize(out);
+ out.writeInt(_cpsp);
+ out.writeInt(_dgslk);
+ out.writeInt(_spidFocus);
+ for (int i = 0; i < _shapeIds.length; i++) {
+ out.writeInt(_shapeIds[i]);
+ }
+ return recSize;
+ }
+
+ public Object clone() {
+ // currently immutable
+ return this;
+ }
+
+ public String toString() {
+ StringBuffer sb = new StringBuffer();
+
+ sb.append("[MSODRAWINGSELECTION]\n");
+ sb.append(" .rh =(").append(_header.debugFormatAsString()).append(")\n");
+ sb.append(" .cpsp =").append(HexDump.intToHex(_cpsp)).append('\n');
+ sb.append(" .dgslk =").append(HexDump.intToHex(_dgslk)).append('\n');
+ sb.append(" .spidFocus=").append(HexDump.intToHex(_spidFocus)).append('\n');
+ sb.append(" .shapeIds =(");
+ for (int i = 0; i < _shapeIds.length; i++) {
+ if (i > 0) {
+ sb.append(", ");
+ }
+ sb.append(HexDump.intToHex(_shapeIds[i]));
+ }
+ sb.append(")\n");
+
+ sb.append("[/MSODRAWINGSELECTION]\n");
+ return sb.toString();
+ }
}
diff --git a/src/java/org/apache/poi/hssf/record/RecordFactory.java b/src/java/org/apache/poi/hssf/record/RecordFactory.java
index 7539e597da..bae867e512 100644
--- a/src/java/org/apache/poi/hssf/record/RecordFactory.java
+++ b/src/java/org/apache/poi/hssf/record/RecordFactory.java
@@ -49,7 +49,7 @@ public final class RecordFactory {
* contains the classes for all the records we want to parse.<br/>
* Note - this most but not *every* subclass of Record.
*/
- private static final Class[] records = {
+ private static final Class[] recordClasses = {
ArrayRecord.class,
BackupRecord.class,
BlankRecord.class,
@@ -163,7 +163,7 @@ public final class RecordFactory {
/**
* cache of the recordsToMap();
*/
- private static Map recordsMap = recordsToMap(records);
+ private static Map recordsMap = recordsToMap(recordClasses);
private static short[] _allKnownRecordSIDs;
@@ -172,16 +172,33 @@ public final class RecordFactory {
* are returned digested into the non-mul form.
*/
public static Record [] createRecord(RecordInputStream in) {
+
+ Record record = createSingleRecord(in);
+ if (record instanceof DBCellRecord) {
+ // Not needed by POI. Regenerated from scratch by POI when spreadsheet is written
+ return new Record[] { null, };
+ }
+ if (record instanceof RKRecord) {
+ return new Record[] { convertToNumberRecord((RKRecord) record), };
+ }
+ if (record instanceof MulRKRecord) {
+ return convertRKRecords((MulRKRecord)record);
+ }
+ if (record instanceof MulBlankRecord) {
+ return convertMulBlankRecords((MulBlankRecord)record);
+ }
+ return new Record[] { record, };
+ }
+
+ private static Record createSingleRecord(RecordInputStream in) {
Constructor constructor = (Constructor) recordsMap.get(new Short(in.getSid()));
if (constructor == null) {
- return new Record[] { new UnknownRecord(in), };
+ return new UnknownRecord(in);
}
- Record retval;
-
try {
- retval = ( Record ) constructor.newInstance(new Object[] { in });
+ return (Record) constructor.newInstance(new Object[] { in });
} catch (InvocationTargetException e) {
throw new RecordFormatException("Unable to construct record instance" , e.getTargetException());
} catch (IllegalArgumentException e) {
@@ -191,54 +208,55 @@ public final class RecordFactory {
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
-
- if (retval instanceof RKRecord) {
- // RK record is a slightly smaller alternative to NumberRecord
- // POI likes NumberRecord better
- RKRecord rk = ( RKRecord ) retval;
- NumberRecord num = new NumberRecord();
+ }
- num.setColumn(rk.getColumn());
- num.setRow(rk.getRow());
- num.setXFIndex(rk.getXFIndex());
- num.setValue(rk.getRKNumber());
- return new Record[] { num, };
- }
- if (retval instanceof DBCellRecord) {
- // Not needed by POI. Regenerated from scratch by POI when spreadsheet is written
- return new Record[] { null, };
- }
- // expand multiple records where necessary
- if (retval instanceof MulRKRecord) {
- MulRKRecord mrk = ( MulRKRecord ) retval;
+ /**
+ * RK record is a slightly smaller alternative to NumberRecord
+ * POI likes NumberRecord better
+ */
+ private static NumberRecord convertToNumberRecord(RKRecord rk) {
+ NumberRecord num = new NumberRecord();
+
+ num.setColumn(rk.getColumn());
+ num.setRow(rk.getRow());
+ num.setXFIndex(rk.getXFIndex());
+ num.setValue(rk.getRKNumber());
+ return num;
+ }
- Record[] mulRecs = new Record[ mrk.getNumColumns() ];
- for (int k = 0; k < mrk.getNumColumns(); k++) {
- NumberRecord nr = new NumberRecord();
+ /**
+ * Converts a {@link MulRKRecord} into an equivalent array of {@link NumberRecord}s
+ */
+ private static NumberRecord[] convertRKRecords(MulRKRecord mrk) {
- nr.setColumn(( short ) (k + mrk.getFirstColumn()));
- nr.setRow(mrk.getRow());
- nr.setXFIndex(mrk.getXFAt(k));
- nr.setValue(mrk.getRKNumberAt(k));
- mulRecs[ k ] = nr;
- }
- return mulRecs;
+ NumberRecord[] mulRecs = new NumberRecord[mrk.getNumColumns()];
+ for (int k = 0; k < mrk.getNumColumns(); k++) {
+ NumberRecord nr = new NumberRecord();
+
+ nr.setColumn((short) (k + mrk.getFirstColumn()));
+ nr.setRow(mrk.getRow());
+ nr.setXFIndex(mrk.getXFAt(k));
+ nr.setValue(mrk.getRKNumberAt(k));
+ mulRecs[k] = nr;
}
- if (retval instanceof MulBlankRecord) {
- MulBlankRecord mb = ( MulBlankRecord ) retval;
+ return mulRecs;
+ }
- Record[] mulRecs = new Record[ mb.getNumColumns() ];
- for (int k = 0; k < mb.getNumColumns(); k++) {
- BlankRecord br = new BlankRecord();
+ /**
+ * Converts a {@link MulBlankRecord} into an equivalent array of {@link BlankRecord}s
+ */
+ private static BlankRecord[] convertMulBlankRecords(MulBlankRecord mb) {
- br.setColumn(( short ) (k + mb.getFirstColumn()));
- br.setRow(mb.getRow());
- br.setXFIndex(mb.getXFAt(k));
- mulRecs[ k ] = br;
- }
- return mulRecs;
+ BlankRecord[] mulRecs = new BlankRecord[mb.getNumColumns()];
+ for (int k = 0; k < mb.getNumColumns(); k++) {
+ BlankRecord br = new BlankRecord();
+
+ br.setColumn((short) (k + mb.getFirstColumn()));
+ br.setRow(mb.getRow());
+ br.setXFIndex(mb.getXFAt(k));
+ mulRecs[k] = br;
}
- return new Record[] { retval, };
+ return mulRecs;
}
/**
@@ -325,19 +343,26 @@ public final class RecordFactory {
// After EOF, Excel seems to pad block with zeros
continue;
}
- Record[] recs = createRecord(recStream); // handle MulRK records
+ Record record = createSingleRecord(recStream);
- if (recs.length > 1) {
- for (int k = 0; k < recs.length; k++) {
- records.add(recs[ k ]); // these will be number records
- }
+ if (record instanceof DBCellRecord) {
+ // Not needed by POI. Regenerated from scratch by POI when spreadsheet is written
continue;
}
- Record record = recs[ 0 ];
- if (record == null) {
+ if (record instanceof RKRecord) {
+ records.add(convertToNumberRecord((RKRecord) record));
+ continue;
+ }
+ if (record instanceof MulRKRecord) {
+ addAll(records, convertRKRecords((MulRKRecord)record));
+ continue;
+ }
+ if (record instanceof MulBlankRecord) {
+ addAll(records, convertMulBlankRecords((MulBlankRecord)record));
continue;
}
+
if (record.getSid() == DrawingGroupRecord.sid
&& lastRecord instanceof DrawingGroupRecord) {
DrawingGroupRecord lastDGRecord = (DrawingGroupRecord) lastRecord;
@@ -354,8 +379,6 @@ public final class RecordFactory {
records.add(record);
} else if (lastRecord instanceof DrawingGroupRecord) {
((DrawingGroupRecord)lastRecord).processContinueRecord(contRec.getData());
- } else if (lastRecord instanceof StringRecord) {
- ((StringRecord)lastRecord).processContinueRecord(contRec.getData());
} else if (lastRecord instanceof UnknownRecord) {
//Gracefully handle records that we don't know about,
//that happen to be continued
@@ -373,4 +396,10 @@ public final class RecordFactory {
}
return records;
}
+
+ private static void addAll(List destList, Record[] srcRecs) {
+ for (int i = 0; i < srcRecs.length; i++) {
+ destList.add(srcRecs[i]);
+ }
+ }
}
diff --git a/src/java/org/apache/poi/hssf/record/RecordInputStream.java b/src/java/org/apache/poi/hssf/record/RecordInputStream.java
index b66bf0e96d..2f275ed928 100755
--- a/src/java/org/apache/poi/hssf/record/RecordInputStream.java
+++ b/src/java/org/apache/poi/hssf/record/RecordInputStream.java
@@ -320,16 +320,6 @@ public final class RecordInputStream extends InputStream implements LittleEndian
}
}
- /** Returns an excel style unicode string from the bytes reminaing in the record.
- * <i>Note:</i> Unicode strings differ from <b>normal</b> strings due to the addition of
- * formatting information.
- *
- * @return The unicode string representation of the remaining bytes.
- */
- public UnicodeString readUnicodeString() {
- return new UnicodeString(this);
- }
-
/** Returns the remaining bytes for the current record.
*
* @return The remaining bytes of the current record.
diff --git a/src/java/org/apache/poi/hssf/record/SSTRecord.java b/src/java/org/apache/poi/hssf/record/SSTRecord.java
index 7c56d955ec..98bd075e34 100644
--- a/src/java/org/apache/poi/hssf/record/SSTRecord.java
+++ b/src/java/org/apache/poi/hssf/record/SSTRecord.java
@@ -17,14 +17,16 @@
package org.apache.poi.hssf.record;
+import java.util.Iterator;
+
+import org.apache.poi.hssf.record.cont.ContinuableRecord;
+import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.IntMapper;
import org.apache.poi.util.LittleEndianConsts;
-import java.util.Iterator;
-
/**
- * Title: Static String Table Record
- * <P>
+ * Title: Static String Table Record (0x00FC)<p/>
+ *
* Description: This holds all the strings for LabelSSTRecords.
* <P>
* REFERENCE: PG 389 Microsoft Excel 97 Developer's Kit (ISBN:
@@ -37,27 +39,20 @@ import java.util.Iterator;
* @see org.apache.poi.hssf.record.LabelSSTRecord
* @see org.apache.poi.hssf.record.ContinueRecord
*/
-public final class SSTRecord extends Record {
+public final class SSTRecord extends ContinuableRecord {
public static final short sid = 0x00FC;
- private static UnicodeString EMPTY_STRING = new UnicodeString("");
-
- /** how big can an SST record be? As big as any record can be: 8228 bytes */
- static final int MAX_RECORD_SIZE = 8228;
+ private static final UnicodeString EMPTY_STRING = new UnicodeString("");
+ // TODO - move these constants to test class (the only consumer)
/** standard record overhead: two shorts (record id plus data space size)*/
- static final int STD_RECORD_OVERHEAD =
- 2 * LittleEndianConsts.SHORT_SIZE;
+ static final int STD_RECORD_OVERHEAD = 2 * LittleEndianConsts.SHORT_SIZE;
/** SST overhead: the standard record overhead, plus the number of strings and the number of unique strings -- two ints */
- static final int SST_RECORD_OVERHEAD =
- ( STD_RECORD_OVERHEAD + ( 2 * LittleEndianConsts.INT_SIZE ) );
+ static final int SST_RECORD_OVERHEAD = STD_RECORD_OVERHEAD + 2 * LittleEndianConsts.INT_SIZE;
/** how much data can we stuff into an SST record? That would be _max minus the standard SST record overhead */
- static final int MAX_DATA_SPACE = MAX_RECORD_SIZE - SST_RECORD_OVERHEAD;
-
- /** overhead for each string includes the string's character count (a short) and the flag describing its characteristics (a byte) */
- static final int STRING_MINIMAL_OVERHEAD = LittleEndianConsts.SHORT_SIZE + LittleEndianConsts.BYTE_SIZE;
+ static final int MAX_DATA_SPACE = RecordInputStream.MAX_RECORD_DATA_SIZE - 8;
/** union of strings in the SST and EXTSST */
private int field_1_num_strings;
@@ -133,37 +128,6 @@ public final class SSTRecord extends Record {
return field_2_num_unique_strings;
}
- /**
- * USE THIS METHOD AT YOUR OWN PERIL: THE <code>addString</code>
- * METHODS MANIPULATE THE NUMBER OF STRINGS AS A SIDE EFFECT; YOUR
- * ATTEMPTS AT MANIPULATING THE STRING COUNT IS LIKELY TO BE VERY
- * WRONG AND WILL RESULT IN BAD BEHAVIOR WHEN THIS RECORD IS
- * WRITTEN OUT AND ANOTHER PROCESS ATTEMPTS TO READ THE RECORD
- *
- * @param count number of strings
- *
- */
-
- public void setNumStrings( final int count )
- {
- field_1_num_strings = count;
- }
-
- /**
- * USE THIS METHOD AT YOUR OWN PERIL: THE <code>addString</code>
- * METHODS MANIPULATE THE NUMBER OF UNIQUE STRINGS AS A SIDE
- * EFFECT; YOUR ATTEMPTS AT MANIPULATING THE UNIQUE STRING COUNT
- * IS LIKELY TO BE VERY WRONG AND WILL RESULT IN BAD BEHAVIOR WHEN
- * THIS RECORD IS WRITTEN OUT AND ANOTHER PROCESS ATTEMPTS TO READ
- * THE RECORD
- *
- * @param count number of strings
- */
-
- public void setNumUniqueStrings( final int count )
- {
- field_2_num_unique_strings = count;
- }
/**
* Get a particular string by its index
@@ -178,11 +142,6 @@ public final class SSTRecord extends Record {
return (UnicodeString) field_3_strings.get( id );
}
- public boolean isString16bit( final int id )
- {
- UnicodeString unicodeString = ( (UnicodeString) field_3_strings.get( id ) );
- return ( ( unicodeString.getOptionFlags() & 0x01 ) == 1 );
- }
/**
* Return a debugging string representation
@@ -350,29 +309,11 @@ public final class SSTRecord extends Record {
return field_3_strings.size();
}
- /**
- * called by the class that is responsible for writing this sucker.
- * Subclasses should implement this so that their data is passed back in a
- * byte array.
- *
- * @return size
- */
-
- public int serialize( int offset, byte[] data )
- {
- SSTSerializer serializer = new SSTSerializer(
- field_3_strings, getNumStrings(), getNumUniqueStrings() );
- int bytes = serializer.serialize( offset, data );
+ protected void serialize(ContinuableRecordOutput out) {
+ SSTSerializer serializer = new SSTSerializer(field_3_strings, getNumStrings(), getNumUniqueStrings() );
+ serializer.serialize(out);
bucketAbsoluteOffsets = serializer.getBucketAbsoluteOffsets();
bucketRelativeOffsets = serializer.getBucketRelativeOffsets();
- return bytes;
- }
-
-
- protected int getDataSize() {
- SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(field_3_strings);
- int recordSize = calculator.getRecordSize();
- return recordSize-4;
}
SSTDeserializer getDeserializer()
diff --git a/src/java/org/apache/poi/hssf/record/SSTRecordHeader.java b/src/java/org/apache/poi/hssf/record/SSTRecordHeader.java
deleted file mode 100644
index e5696111f8..0000000000
--- a/src/java/org/apache/poi/hssf/record/SSTRecordHeader.java
+++ /dev/null
@@ -1,76 +0,0 @@
-
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.hssf.record;
-
-import org.apache.poi.util.LittleEndian;
-import org.apache.poi.util.LittleEndianConsts;
-
-/**
- * Write out an SST header record.
- *
- * @author Glen Stampoultzis (glens at apache.org)
- */
-class SSTRecordHeader
-{
- int numStrings;
- int numUniqueStrings;
-
- public SSTRecordHeader( int numStrings, int numUniqueStrings )
- {
- this.numStrings = numStrings;
- this.numUniqueStrings = numUniqueStrings;
- }
-
- /**
- * Writes out the SST record. This consists of the sid, the record size, the number of
- * strings and the number of unique strings.
- *
- * @param data The data buffer to write the header to.
- * @param bufferIndex The index into the data buffer where the header should be written.
- * @param recSize The number of records written.
- *
- * @return The bufer of bytes modified.
- */
- public int writeSSTHeader( UnicodeString.UnicodeRecordStats stats, byte[] data, int bufferIndex, int recSize )
- {
- int offset = bufferIndex;
-
- LittleEndian.putShort( data, offset, SSTRecord.sid );
- offset += LittleEndianConsts.SHORT_SIZE;
- stats.recordSize += LittleEndianConsts.SHORT_SIZE;
- stats.remainingSize -= LittleEndianConsts.SHORT_SIZE;
- //Delay writing the length
- stats.lastLengthPos = offset;
- offset += LittleEndianConsts.SHORT_SIZE;
- stats.recordSize += LittleEndianConsts.SHORT_SIZE;
- stats.remainingSize -= LittleEndianConsts.SHORT_SIZE;
- LittleEndian.putInt( data, offset, numStrings );
- offset += LittleEndianConsts.INT_SIZE;
- stats.recordSize += LittleEndianConsts.INT_SIZE;
- stats.remainingSize -= LittleEndianConsts.INT_SIZE;
- LittleEndian.putInt( data, offset, numUniqueStrings );
- offset += LittleEndianConsts.INT_SIZE;
- stats.recordSize += LittleEndianConsts.INT_SIZE;
- stats.remainingSize -= LittleEndianConsts.INT_SIZE;
-
- return offset - bufferIndex;
- }
-
-}
diff --git a/src/java/org/apache/poi/hssf/record/SSTRecordSizeCalculator.java b/src/java/org/apache/poi/hssf/record/SSTRecordSizeCalculator.java
deleted file mode 100644
index c10c21d83d..0000000000
--- a/src/java/org/apache/poi/hssf/record/SSTRecordSizeCalculator.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/* ====================================================================
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-==================================================================== */
-
-
-package org.apache.poi.hssf.record;
-
-import org.apache.poi.util.IntMapper;
-
-/**
- * Used to calculate the record sizes for a particular record. This kind of
- * sucks because it's similar to the SST serialization code. In general
- * the SST serialization code needs to be rewritten.
- *
- * @author Glen Stampoultzis (glens at apache.org)
- * @author Jason Height (jheight at apache.org)
- */
-class SSTRecordSizeCalculator
-{
- private IntMapper strings;
-
- public SSTRecordSizeCalculator(IntMapper strings)
- {
- this.strings = strings;
- }
-
- public int getRecordSize() {
- UnicodeString.UnicodeRecordStats rs = new UnicodeString.UnicodeRecordStats();
- rs.remainingSize -= SSTRecord.SST_RECORD_OVERHEAD;
- rs.recordSize += SSTRecord.SST_RECORD_OVERHEAD;
- for (int i=0; i < strings.size(); i++ )
- {
- UnicodeString unistr = ( (UnicodeString) strings.get(i));
- unistr.getRecordSize(rs);
- }
- return rs.recordSize;
- }
-}
diff --git a/src/java/org/apache/poi/hssf/record/SSTSerializer.java b/src/java/org/apache/poi/hssf/record/SSTSerializer.java
index 3f97fa3e3a..78844deb30 100644
--- a/src/java/org/apache/poi/hssf/record/SSTSerializer.java
+++ b/src/java/org/apache/poi/hssf/record/SSTSerializer.java
@@ -1,4 +1,3 @@
-
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@@ -15,12 +14,11 @@
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
-
package org.apache.poi.hssf.record;
+import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.IntMapper;
-import org.apache.poi.util.LittleEndian;
/**
* This class handles serialization of SST records. It utilizes the record processor
@@ -28,71 +26,50 @@ import org.apache.poi.util.LittleEndian;
*
* @author Glen Stampoultzis (glens at apache.org)
*/
-class SSTSerializer
-{
+final class SSTSerializer {
- // todo: make private again
- private IntMapper strings;
+ private final int _numStrings;
+ private final int _numUniqueStrings;
- private SSTRecordHeader sstRecordHeader;
+ private final IntMapper strings;
/** Offsets from the beginning of the SST record (even across continuations) */
- int[] bucketAbsoluteOffsets;
+ private final int[] bucketAbsoluteOffsets;
/** Offsets relative the start of the current SST or continue record */
- int[] bucketRelativeOffsets;
+ private final int[] bucketRelativeOffsets;
int startOfSST, startOfRecord;
public SSTSerializer( IntMapper strings, int numStrings, int numUniqueStrings )
{
this.strings = strings;
- this.sstRecordHeader = new SSTRecordHeader( numStrings, numUniqueStrings );
+ _numStrings = numStrings;
+ _numUniqueStrings = numUniqueStrings;
int infoRecs = ExtSSTRecord.getNumberOfInfoRecsForStrings(strings.size());
this.bucketAbsoluteOffsets = new int[infoRecs];
this.bucketRelativeOffsets = new int[infoRecs];
}
- /**
- * Create a byte array consisting of an SST record and any
- * required Continue records, ready to be written out.
- * <p>
- * If an SST record and any subsequent Continue records are read
- * in to create this instance, this method should produce a byte
- * array that is identical to the byte array produced by
- * concatenating the input records' data.
- *
- * @return the byte array
- */
- public int serialize(int offset, byte[] data )
- {
- UnicodeString.UnicodeRecordStats stats = new UnicodeString.UnicodeRecordStats();
- sstRecordHeader.writeSSTHeader( stats, data, 0 + offset, 0 );
- int pos = offset + SSTRecord.SST_RECORD_OVERHEAD;
+ public void serialize(ContinuableRecordOutput out) {
+ out.writeInt(_numStrings);
+ out.writeInt(_numUniqueStrings);
for ( int k = 0; k < strings.size(); k++ )
{
if (k % ExtSSTRecord.DEFAULT_BUCKET_SIZE == 0)
{
+ int rOff = out.getTotalSize();
int index = k/ExtSSTRecord.DEFAULT_BUCKET_SIZE;
if (index < ExtSSTRecord.MAX_BUCKETS) {
//Excel only indexes the first 128 buckets.
- bucketAbsoluteOffsets[index] = pos-offset;
- bucketRelativeOffsets[index] = pos-offset;
- }
+ bucketAbsoluteOffsets[index] = rOff;
+ bucketRelativeOffsets[index] = rOff;
+ }
}
UnicodeString s = getUnicodeString(k);
- pos += s.serialize(stats, pos, data);
- }
- //Check to see if there is a hanging continue record length
- if (stats.lastLengthPos != -1) {
- short lastRecordLength = (short)(pos - stats.lastLengthPos-2);
- if (lastRecordLength > 8224)
- throw new InternalError();
-
- LittleEndian.putShort(data, stats.lastLengthPos, lastRecordLength);
- }
- return pos - offset;
- }
+ s.serialize(out);
+ }
+ }
private UnicodeString getUnicodeString( int index )
diff --git a/src/java/org/apache/poi/hssf/record/StringRecord.java b/src/java/org/apache/poi/hssf/record/StringRecord.java
index 620a07e093..45322bea4b 100644
--- a/src/java/org/apache/poi/hssf/record/StringRecord.java
+++ b/src/java/org/apache/poi/hssf/record/StringRecord.java
@@ -17,19 +17,23 @@
package org.apache.poi.hssf.record;
-import org.apache.poi.util.LittleEndian;
+import org.apache.poi.hssf.record.cont.ContinuableRecord;
+import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.StringUtil;
/**
- * Supports the STRING record structure. (0x0207)
+ * STRING (0x0207)<p/>
+ *
+ * Stores the cached result of a text formula
*
* @author Glen Stampoultzis (glens at apache.org)
*/
-public class StringRecord extends Record {
- public final static short sid = 0x0207;
- private int field_1_string_length;
- private byte field_2_unicode_flag;
- private String field_3_string;
+public final class StringRecord extends ContinuableRecord {
+
+ public final static short sid = 0x0207;
+
+ private boolean _is16bitUnicode;
+ private String _text;
public StringRecord()
@@ -39,77 +43,24 @@ public class StringRecord extends Record {
/**
* @param in the RecordInputstream to read the record from
*/
- public StringRecord( RecordInputStream in)
- {
- field_1_string_length = in.readShort();
- field_2_unicode_flag = in.readByte();
- byte[] data = in.readRemainder();
- //Why isn't this using the in.readString methods???
- if (isUnCompressedUnicode())
- {
- field_3_string = StringUtil.getFromUnicodeLE(data, 0, field_1_string_length );
- }
- else
- {
- field_3_string = StringUtil.getFromCompressedUnicode(data, 0, field_1_string_length);
+ public StringRecord( RecordInputStream in) {
+ int field_1_string_length = in.readUShort();
+ _is16bitUnicode = in.readByte() != 0x00;
+
+ if (_is16bitUnicode){
+ _text = in.readUnicodeLEString(field_1_string_length);
+ } else {
+ _text = in.readCompressedUnicode(field_1_string_length);
}
}
-
- public void processContinueRecord(byte[] data) {
- if(isUnCompressedUnicode()) {
- field_3_string += StringUtil.getFromUnicodeLE(data, 0, field_1_string_length - field_3_string.length());
- } else {
- field_3_string += StringUtil.getFromCompressedUnicode(data, 0, field_1_string_length - field_3_string.length());
- }
- }
- private int getStringByteLength()
- {
- return isUnCompressedUnicode() ? field_1_string_length * 2 : field_1_string_length;
- }
-
- protected int getDataSize() {
- return 2 + 1 + getStringByteLength();
- }
- /**
- * is this uncompressed unicode (16bit)? Or just 8-bit compressed?
- * @return isUnicode - True for 16bit- false for 8bit
- */
- public boolean isUnCompressedUnicode()
- {
- return (field_2_unicode_flag == 1);
+ protected void serialize(ContinuableRecordOutput out) {
+ out.writeShort(_text.length());
+ out.writeStringData(_text);
}
- /**
- * called by the class that is responsible for writing this sucker.
- * Subclasses should implement this so that their data is passed back in a
- * byte array.
- *
- * @param offset to begin writing at
- * @param data byte array containing instance data
- * @return number of bytes written
- */
- public int serialize( int offset, byte[] data )
- {
- LittleEndian.putUShort(data, 0 + offset, sid);
- LittleEndian.putUShort(data, 2 + offset, 3 + getStringByteLength());
- LittleEndian.putUShort(data, 4 + offset, field_1_string_length);
- data[6 + offset] = field_2_unicode_flag;
- if (isUnCompressedUnicode())
- {
- StringUtil.putUnicodeLE(field_3_string, data, 7 + offset);
- }
- else
- {
- StringUtil.putCompressedUnicode(field_3_string, data, 7 + offset);
- }
- return getRecordSize();
- }
- /**
- * return the non static version of the id for this record.
- */
public short getSid()
{
return sid;
@@ -120,26 +71,16 @@ public class StringRecord extends Record {
*/
public String getString()
{
- return field_3_string;
+ return _text;
}
- /**
- * Sets whether the string is compressed or not
- * @param unicode_flag 1 = uncompressed, 0 = compressed
- */
- public void setCompressedFlag( byte unicode_flag )
- {
- this.field_2_unicode_flag = unicode_flag;
- }
/**
* Sets the string represented by this record.
*/
- public void setString( String string )
- {
- this.field_1_string_length = string.length();
- this.field_3_string = string;
- setCompressedFlag(StringUtil.hasMultibyte(string) ? (byte)1 : (byte)0);
+ public void setString(String string) {
+ _text = string;
+ _is16bitUnicode = StringUtil.hasMultibyte(string);
}
public String toString()
@@ -148,16 +89,15 @@ public class StringRecord extends Record {
buffer.append("[STRING]\n");
buffer.append(" .string = ")
- .append(field_3_string).append("\n");
+ .append(_text).append("\n");
buffer.append("[/STRING]\n");
return buffer.toString();
}
public Object clone() {
StringRecord rec = new StringRecord();
- rec.field_1_string_length = this.field_1_string_length;
- rec.field_2_unicode_flag= this.field_2_unicode_flag;
- rec.field_3_string = this.field_3_string;
+ rec._is16bitUnicode= _is16bitUnicode;
+ rec._text = _text;
return rec;
}
}
diff --git a/src/java/org/apache/poi/hssf/record/SupBookRecord.java b/src/java/org/apache/poi/hssf/record/SupBookRecord.java
index a58c5c8c97..5cbfd42d6a 100644
--- a/src/java/org/apache/poi/hssf/record/SupBookRecord.java
+++ b/src/java/org/apache/poi/hssf/record/SupBookRecord.java
@@ -17,11 +17,12 @@
package org.apache.poi.hssf.record;
-import org.apache.poi.hssf.record.UnicodeString.UnicodeRecordStats;
-import org.apache.poi.util.LittleEndian;
+import org.apache.poi.util.LittleEndianByteArrayOutputStream;
+import org.apache.poi.util.LittleEndianOutput;
+import org.apache.poi.util.StringUtil;
/**
- * Title: Sup Book (EXTERNALBOOK) <P>
+ * Title: Sup Book - EXTERNALBOOK (0x01AE) <p/>
* Description: A External Workbook Description (Supplemental Book)
* Its only a dummy record for making new ExternSheet Record <P>
* REFERENCE: 5.38<P>
@@ -31,25 +32,25 @@ import org.apache.poi.util.LittleEndian;
*/
public final class SupBookRecord extends Record {
- public final static short sid = 0x1AE;
+ public final static short sid = 0x01AE;
private static final short SMALL_RECORD_SIZE = 4;
private static final short TAG_INTERNAL_REFERENCES = 0x0401;
private static final short TAG_ADD_IN_FUNCTIONS = 0x3A01;
- private short field_1_number_of_sheets;
- private UnicodeString field_2_encoded_url;
- private UnicodeString[] field_3_sheet_names;
- private boolean _isAddInFunctions;
+ private short field_1_number_of_sheets;
+ private String field_2_encoded_url;
+ private String[] field_3_sheet_names;
+ private boolean _isAddInFunctions;
+
-
public static SupBookRecord createInternalReferences(short numberOfSheets) {
return new SupBookRecord(false, numberOfSheets);
}
public static SupBookRecord createAddInFunctions() {
return new SupBookRecord(true, (short)0);
}
- public static SupBookRecord createExternalReferences(UnicodeString url, UnicodeString[] sheetNames) {
+ public static SupBookRecord createExternalReferences(String url, String[] sheetNames) {
return new SupBookRecord(url, sheetNames);
}
private SupBookRecord(boolean isAddInFuncs, short numberOfSheets) {
@@ -59,7 +60,7 @@ public final class SupBookRecord extends Record {
field_3_sheet_names = null;
_isAddInFunctions = isAddInFuncs;
}
- public SupBookRecord(UnicodeString url, UnicodeString[] sheetNames) {
+ public SupBookRecord(String url, String[] sheetNames) {
field_1_number_of_sheets = (short) sheetNames.length;
field_2_encoded_url = url;
field_3_sheet_names = sheetNames;
@@ -84,18 +85,18 @@ public final class SupBookRecord extends Record {
* @param offset of the record's data (provided a big array of the file)
*/
public SupBookRecord(RecordInputStream in) {
- int recLen = in.remaining();
-
+ int recLen = in.remaining();
+
field_1_number_of_sheets = in.readShort();
-
+
if(recLen > SMALL_RECORD_SIZE) {
// 5.38.1 External References
_isAddInFunctions = false;
- field_2_encoded_url = in.readUnicodeString();
- UnicodeString[] sheetNames = new UnicodeString[field_1_number_of_sheets];
+ field_2_encoded_url = in.readString();
+ String[] sheetNames = new String[field_1_number_of_sheets];
for (int i = 0; i < sheetNames.length; i++) {
- sheetNames[i] = in.readUnicodeString();
+ sheetNames[i] = in.readString();
}
field_3_sheet_names = sheetNames;
return;
@@ -103,7 +104,7 @@ public final class SupBookRecord extends Record {
// else not 'External References'
field_2_encoded_url = null;
field_3_sheet_names = null;
-
+
short nextShort = in.readShort();
if(nextShort == TAG_INTERNAL_REFERENCES) {
// 5.38.2 'Internal References'
@@ -116,7 +117,7 @@ public final class SupBookRecord extends Record {
+ field_1_number_of_sheets + ")");
}
} else {
- throw new RuntimeException("invalid EXTERNALBOOK code ("
+ throw new RuntimeException("invalid EXTERNALBOOK code ("
+ Integer.toHexString(nextShort) + ")");
}
}
@@ -124,7 +125,7 @@ public final class SupBookRecord extends Record {
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append(getClass().getName()).append(" [SUPBOOK ");
-
+
if(isExternalReferences()) {
sb.append("External References");
sb.append(" nSheets=").append(field_1_number_of_sheets);
@@ -143,18 +144,14 @@ public final class SupBookRecord extends Record {
return SMALL_RECORD_SIZE;
}
int sum = 2; // u16 number of sheets
- UnicodeRecordStats urs = new UnicodeRecordStats();
- field_2_encoded_url.getRecordSize(urs);
- sum += urs.recordSize;
-
+
+ sum += StringUtil.getEncodedSize(field_2_encoded_url);
+
for(int i=0; i<field_3_sheet_names.length; i++) {
- urs = new UnicodeRecordStats();
- field_3_sheet_names[i].getRecordSize(urs);
- sum += urs.recordSize;
+ sum += StringUtil.getEncodedSize(field_3_sheet_names[i]);
}
return sum;
}
-
/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
@@ -165,29 +162,26 @@ public final class SupBookRecord extends Record {
* @return number of bytes written
*/
public int serialize(int offset, byte [] data) {
- LittleEndian.putShort(data, 0 + offset, sid);
int dataSize = getDataSize();
- LittleEndian.putShort(data, 2 + offset, (short) dataSize);
- LittleEndian.putShort(data, 4 + offset, field_1_number_of_sheets);
-
+ int recordSize = 4 + dataSize;
+ LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recordSize);
+
+ out.writeShort(sid);
+ out.writeShort(dataSize);
+ out.writeShort(field_1_number_of_sheets);
+
if(isExternalReferences()) {
-
- int currentOffset = 6 + offset;
- UnicodeRecordStats urs = new UnicodeRecordStats();
- field_2_encoded_url.serialize(urs, currentOffset, data);
- currentOffset += urs.recordSize;
-
+ StringUtil.writeUnicodeString(out, field_2_encoded_url);
+
for(int i=0; i<field_3_sheet_names.length; i++) {
- urs = new UnicodeRecordStats();
- field_3_sheet_names[i].serialize(urs, currentOffset, data);
- currentOffset += urs.recordSize;
+ StringUtil.writeUnicodeString(out, field_3_sheet_names[i]);
}
} else {
- short field2val = _isAddInFunctions ? TAG_ADD_IN_FUNCTIONS : TAG_INTERNAL_REFERENCES;
-
- LittleEndian.putShort(data, 6 + offset, field2val);
+ int field2val = _isAddInFunctions ? TAG_ADD_IN_FUNCTIONS : TAG_INTERNAL_REFERENCES;
+
+ out.writeShort(field2val);
}
- return dataSize + 4;
+ return recordSize;
}
public void setNumberOfSheets(short number){
@@ -203,7 +197,7 @@ public final class SupBookRecord extends Record {
return sid;
}
public String getURL() {
- String encodedUrl = field_2_encoded_url.getString();
+ String encodedUrl = field_2_encoded_url;
switch(encodedUrl.charAt(0)) {
case 0: // Reference to an empty workbook name
return encodedUrl.substring(1); // will this just be empty string?
@@ -211,7 +205,7 @@ public final class SupBookRecord extends Record {
return decodeFileName(encodedUrl);
case 2: // Self-referential external reference
return encodedUrl.substring(1);
-
+
}
return encodedUrl;
}
@@ -219,18 +213,18 @@ public final class SupBookRecord extends Record {
return encodedUrl.substring(1);
// TODO the following special characters may appear in the rest of the string, and need to get interpreted
/* see "MICROSOFT OFFICE EXCEL 97-2007 BINARY FILE FORMAT SPECIFICATION"
- chVolume 1
- chSameVolume 2
+ chVolume 1
+ chSameVolume 2
chDownDir 3
- chUpDir 4
+ chUpDir 4
chLongVolume 5
chStartupDir 6
chAltStartupDir 7
chLibDir 8
-
+
*/
}
- public UnicodeString[] getSheetNames() {
- return (UnicodeString[]) field_3_sheet_names.clone();
+ public String[] getSheetNames() {
+ return (String[]) field_3_sheet_names.clone();
}
}
diff --git a/src/java/org/apache/poi/hssf/record/TextObjectRecord.java b/src/java/org/apache/poi/hssf/record/TextObjectRecord.java
index 0a50e181b2..019aab09e8 100644
--- a/src/java/org/apache/poi/hssf/record/TextObjectRecord.java
+++ b/src/java/org/apache/poi/hssf/record/TextObjectRecord.java
@@ -17,16 +17,13 @@
package org.apache.poi.hssf.record;
-import java.io.UnsupportedEncodingException;
-
+import org.apache.poi.hssf.record.cont.ContinuableRecord;
+import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.hssf.record.formula.Ptg;
import org.apache.poi.hssf.usermodel.HSSFRichTextString;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.HexDump;
-import org.apache.poi.util.LittleEndian;
-import org.apache.poi.util.LittleEndianByteArrayOutputStream;
-import org.apache.poi.util.LittleEndianOutput;
/**
* The TXO record (0x01B6) is used to define the properties of a text box. It is
@@ -36,7 +33,7 @@ import org.apache.poi.util.LittleEndianOutput;
*
* @author Glen Stampoultzis (glens at apache.org)
*/
-public final class TextObjectRecord extends Record {
+public final class TextObjectRecord extends ContinuableRecord {
public final static short sid = 0x01B6;
private static final int FORMAT_RUN_ENCODED_SIZE = 8; // 2 shorts and 4 bytes reserved
@@ -163,30 +160,7 @@ public final class TextObjectRecord extends Record {
return sid;
}
- /**
- * Only for the current record. does not include any subsequent Continue
- * records
- */
- private int getCurrentRecordDataSize() {
- int result = 2 + 2 + 2 + 2 + 2 + 2 + 2 + 4;
- if (_linkRefPtg != null) {
- result += 2 // formula size
- + 4 // unknownInt
- +_linkRefPtg.getSize();
- if (_unknownPostFormulaByte != null) {
- result += 1;
- }
- }
- return result;
- }
-
- private int serializeTXORecord(int offset, byte[] data) {
- int dataSize = getCurrentRecordDataSize();
- int recSize = dataSize+4;
- LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recSize);
-
- out.writeShort(TextObjectRecord.sid);
- out.writeShort(dataSize);
+ private void serializeTXORecord(ContinuableRecordOutput out) {
out.writeShort(field_1_options);
out.writeShort(field_2_textOrientation);
@@ -206,79 +180,23 @@ public final class TextObjectRecord extends Record {
out.writeByte(_unknownPostFormulaByte.byteValue());
}
}
- return recSize;
}
- private int serializeTrailingRecords(int offset, byte[] data) {
- byte[] textBytes;
- try {
- textBytes = _text.getString().getBytes("UTF-16LE");
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e.getMessage(), e);
- }
- int remainingLength = textBytes.length;
-
- int countTextBytesWritten = 0;
- int pos = offset;
- // (regardless what was read, we always serialize double-byte
- // unicode characters (UTF-16LE).
- Byte unicodeFlag = new Byte((byte)1);
- while (remainingLength > 0) {
- int chunkSize = Math.min(RecordInputStream.MAX_RECORD_DATA_SIZE - 2, remainingLength);
- remainingLength -= chunkSize;
- pos += ContinueRecord.write(data, pos, unicodeFlag, textBytes, countTextBytesWritten, chunkSize);
- countTextBytesWritten += chunkSize;
- }
-
- byte[] formatData = createFormatData(_text);
- pos += ContinueRecord.write(data, pos, null, formatData);
- return pos - offset;
+ private void serializeTrailingRecords(ContinuableRecordOutput out) {
+ out.writeContinue();
+ out.writeStringData(_text.getString());
+ out.writeContinue();
+ writeFormatData(out, _text);
}
- private int getTrailingRecordsSize() {
- if (_text.length() < 1) {
- return 0;
- }
- int encodedTextSize = 0;
- int textBytesLength = _text.length() * LittleEndian.SHORT_SIZE;
- while (textBytesLength > 0) {
- int chunkSize = Math.min(RecordInputStream.MAX_RECORD_DATA_SIZE - 2, textBytesLength);
- textBytesLength -= chunkSize;
-
- encodedTextSize += 4; // +4 for ContinueRecord sid+size
- encodedTextSize += 1+chunkSize; // +1 for compressed unicode flag,
- }
-
- int encodedFormatSize = (_text.numFormattingRuns() + 1) * FORMAT_RUN_ENCODED_SIZE
- + 4; // +4 for ContinueRecord sid+size
- return encodedTextSize + encodedFormatSize;
- }
+ protected void serialize(ContinuableRecordOutput out) {
-
- public int serialize(int offset, byte[] data) {
-
- int expectedTotalSize = getRecordSize();
- int totalSize = serializeTXORecord(offset, data);
-
+ serializeTXORecord(out);
if (_text.getString().length() > 0) {
- totalSize += serializeTrailingRecords(offset+totalSize, data);
+ serializeTrailingRecords(out);
}
-
- if (totalSize != expectedTotalSize)
- throw new RecordFormatException(totalSize
- + " bytes written but getRecordSize() reports " + expectedTotalSize);
- return totalSize;
}
- /**
- * Note - this total size includes all potential {@link ContinueRecord}s written
- * but it is not the "ushort size" value to be written at the start of the first BIFF record
- */
- protected int getDataSize() {
- return getCurrentRecordDataSize() + getTrailingRecordsSize();
- }
-
-
private int getFormattingDataLength() {
if (_text.length() < 1) {
// important - no formatting data if text is empty
@@ -287,25 +205,17 @@ public final class TextObjectRecord extends Record {
return (_text.numFormattingRuns() + 1) * FORMAT_RUN_ENCODED_SIZE;
}
- private static byte[] createFormatData(HSSFRichTextString str) {
+ private static void writeFormatData(ContinuableRecordOutput out , HSSFRichTextString str) {
int nRuns = str.numFormattingRuns();
- byte[] result = new byte[(nRuns + 1) * FORMAT_RUN_ENCODED_SIZE];
- int pos = 0;
for (int i = 0; i < nRuns; i++) {
- LittleEndian.putUShort(result, pos, str.getIndexOfFormattingRun(i));
- pos += 2;
+ out.writeShort(str.getIndexOfFormattingRun(i));
int fontIndex = str.getFontOfFormattingRun(i);
- LittleEndian.putUShort(result, pos, fontIndex == str.NO_FONT ? 0 : fontIndex);
- pos += 2;
- pos += 4; // skip reserved
+ out.writeShort(fontIndex == str.NO_FONT ? 0 : fontIndex);
+ out.writeInt(0); // skip reserved
}
- LittleEndian.putUShort(result, pos, str.length());
- pos += 2;
- LittleEndian.putUShort(result, pos, 0);
- pos += 2;
- pos += 4; // skip reserved
-
- return result;
+ out.writeShort(str.length());
+ out.writeShort(0);
+ out.writeInt(0); // skip reserved
}
/**
diff --git a/src/java/org/apache/poi/hssf/record/UnicodeString.java b/src/java/org/apache/poi/hssf/record/UnicodeString.java
index 0494aa98ab..fc493d4348 100644
--- a/src/java/org/apache/poi/hssf/record/UnicodeString.java
+++ b/src/java/org/apache/poi/hssf/record/UnicodeString.java
@@ -17,75 +17,84 @@
package org.apache.poi.hssf.record;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
-import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.HexDump;
-
-import java.util.Iterator;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Collections;
+import org.apache.poi.util.LittleEndianInput;
+import org.apache.poi.util.LittleEndianOutput;
/**
- * Title: Unicode String<P>
- * Description: Unicode String record. We implement these as a record, although
- * they are really just standard fields that are in several records.
- * It is considered more desirable then repeating it in all of them.<P>
- * REFERENCE: PG 264 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)<P>
+ * Title: Unicode String<p/>
+ * Description: Unicode String - just standard fields that are in several records.
+ * It is considered more desirable then repeating it in all of them.<p/>
+ * REFERENCE: PG 264 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)<p/>
* @author Andrew C. Oliver
* @author Marc Johnson (mjohnson at apache dot org)
* @author Glen Stampoultzis (glens at apache.org)
*/
public final class UnicodeString implements Comparable {
- private short field_1_charCount; // = 0;
- private byte field_2_optionflags; // = 0;
- private String field_3_string; // = null;
+ private short field_1_charCount;
+ private byte field_2_optionflags;
+ private String field_3_string;
private List field_4_format_runs;
private byte[] field_5_ext_rst;
- private static final BitField highByte = BitFieldFactory.getInstance(0x1);
- private static final BitField extBit = BitFieldFactory.getInstance(0x4);
- private static final BitField richText = BitFieldFactory.getInstance(0x8);
+ private static final BitField highByte = BitFieldFactory.getInstance(0x1);
+ private static final BitField extBit = BitFieldFactory.getInstance(0x4);
+ private static final BitField richText = BitFieldFactory.getInstance(0x8);
public static class FormatRun implements Comparable {
- short character;
- short fontIndex;
+ short character;
+ short fontIndex;
- public FormatRun(short character, short fontIndex) {
- this.character = character;
- this.fontIndex = fontIndex;
- }
+ public FormatRun(short character, short fontIndex) {
+ this.character = character;
+ this.fontIndex = fontIndex;
+ }
- public short getCharacterPos() {
- return character;
- }
+ public FormatRun(LittleEndianInput in) {
+ this(in.readShort(), in.readShort());
+ }
- public short getFontIndex() {
- return fontIndex;
- }
+ public short getCharacterPos() {
+ return character;
+ }
- public boolean equals(Object o) {
- if ((o == null) || (o.getClass() != this.getClass()))
- {
- return false;
+ public short getFontIndex() {
+ return fontIndex;
}
- FormatRun other = ( FormatRun ) o;
- return ((character == other.character) && (fontIndex == other.fontIndex));
- }
+ public boolean equals(Object o) {
+ if (!(o instanceof FormatRun)) {
+ return false;
+ }
+ FormatRun other = ( FormatRun ) o;
- public int compareTo(Object obj) {
- FormatRun r = (FormatRun)obj;
- if ((character == r.character) && (fontIndex == r.fontIndex))
- return 0;
- if (character == r.character)
- return fontIndex - r.fontIndex;
- else return character - r.character;
- }
+ return character == other.character && fontIndex == other.fontIndex;
+ }
- public String toString() {
- return "character="+character+",fontIndex="+fontIndex;
- }
+ public int compareTo(Object obj) {
+ FormatRun r = (FormatRun)obj;
+ if ((character == r.character) && (fontIndex == r.fontIndex))
+ return 0;
+ if (character == r.character)
+ return fontIndex - r.fontIndex;
+ else return character - r.character;
+ }
+
+ public String toString() {
+ return "character="+character+",fontIndex="+fontIndex;
+ }
+
+ public void serialize(LittleEndianOutput out) {
+ out.writeShort(character);
+ out.writeShort(fontIndex);
+ }
}
private UnicodeString() {
@@ -116,13 +125,12 @@ public final class UnicodeString implements Comparable {
*/
public boolean equals(Object o)
{
- if ((o == null) || (o.getClass() != this.getClass()))
- {
+ if (!(o instanceof UnicodeString)) {
return false;
}
- UnicodeString other = ( UnicodeString ) o;
+ UnicodeString other = (UnicodeString) o;
- //Ok lets do this in stages to return a quickly, first check the actual string
+ //OK lets do this in stages to return a quickly, first check the actual string
boolean eq = ((field_1_charCount == other.field_1_charCount)
&& (field_2_optionflags == other.field_2_optionflags)
&& field_3_string.equals(other.field_3_string));
@@ -148,7 +156,7 @@ public final class UnicodeString implements Comparable {
if (!run1.equals(run2))
return false;
- }
+ }
//Well the format runs are equal as well!, better check the ExtRst data
//Which by the way we dont know how to decode!
@@ -194,19 +202,17 @@ public final class UnicodeString implements Comparable {
boolean isCompressed = ((field_2_optionflags & 1) == 0);
if (isCompressed) {
- field_3_string = in.readCompressedUnicode(field_1_charCount);
+ field_3_string = in.readCompressedUnicode(field_1_charCount);
} else {
- field_3_string = in.readUnicodeLEString(field_1_charCount);
+ field_3_string = in.readUnicodeLEString(field_1_charCount);
}
if (isRichText() && (runCount > 0)) {
field_4_format_runs = new ArrayList(runCount);
for (int i=0;i<runCount;i++) {
- field_4_format_runs.add(new FormatRun(in.readShort(), in.readShort()));
- //read reserved
- //in.readInt();
- }
+ field_4_format_runs.add(new FormatRun(in));
+ }
}
if (isExtendedText() && (extensionLength > 0)) {
@@ -372,11 +378,8 @@ public final class UnicodeString implements Comparable {
field_2_optionflags = richText.clearByte(field_2_optionflags);
}
- public byte[] getExtendedRst() {
- return this.field_5_ext_rst;
- }
- public void setExtendedRst(byte[] ext_rst) {
+ void setExtendedRst(byte[] ext_rst) {
if (ext_rst != null)
field_2_optionflags = extBit.setByte(field_2_optionflags);
else field_2_optionflags = extBit.clearByte(field_2_optionflags);
@@ -391,13 +394,13 @@ public final class UnicodeString implements Comparable {
* removed / re-ordered
*/
public void swapFontUse(short oldFontIndex, short newFontIndex) {
- Iterator i = field_4_format_runs.iterator();
- while(i.hasNext()) {
- FormatRun run = (FormatRun)i.next();
- if(run.fontIndex == oldFontIndex) {
- run.fontIndex = newFontIndex;
- }
- }
+ Iterator i = field_4_format_runs.iterator();
+ while(i.hasNext()) {
+ FormatRun run = (FormatRun)i.next();
+ if(run.fontIndex == oldFontIndex) {
+ run.fontIndex = newFontIndex;
+ }
+ }
}
/**
@@ -442,353 +445,45 @@ public final class UnicodeString implements Comparable {
return buffer.toString();
}
- private int writeContinueIfRequired(UnicodeRecordStats stats, final int requiredSize, int offset, byte[] data) {
- //Basic string overhead
- if (stats.remainingSize < requiredSize) {
- //Check if be are already in a continue record, if so make sure that
- //we go back and write out our length
- if (stats.lastLengthPos != -1) {
- short lastRecordLength = (short)(offset - stats.lastLengthPos - 2);
- if (lastRecordLength > 8224)
- throw new InternalError();
- LittleEndian.putShort(data, stats.lastLengthPos, lastRecordLength);
- }
-
- LittleEndian.putShort(data, offset, ContinueRecord.sid);
- offset+=2;
- //Record the location of the last continue length position, but don't write
- //anything there yet (since we don't know what it will be!)
- stats.lastLengthPos = offset;
- offset += 2;
-
- stats.recordSize += 4;
- stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
- }
- return offset;
- }
-
- public int serialize(UnicodeRecordStats stats, final int offset, byte [] data)
- {
- int pos = offset;
-
- //Basic string overhead
- pos = writeContinueIfRequired(stats, 3, pos, data);
- LittleEndian.putShort(data, pos, getCharCount());
- pos += 2;
- data[ pos ] = getOptionFlags();
- pos += 1;
- stats.recordSize += 3;
- stats.remainingSize-= 3;
-
- if (isRichText()) {
- if (field_4_format_runs != null) {
- pos = writeContinueIfRequired(stats, 2, pos, data);
-
- LittleEndian.putShort(data, pos, (short) field_4_format_runs.size());
- pos += 2;
- stats.recordSize += 2;
- stats.remainingSize -= 2;
- }
- }
- if ( isExtendedText() )
- {
- if (this.field_5_ext_rst != null) {
- pos = writeContinueIfRequired(stats, 4, pos, data);
-
- LittleEndian.putInt(data, pos, field_5_ext_rst.length);
- pos += 4;
- stats.recordSize += 4;
- stats.remainingSize -= 4;
- }
- }
-
- int charsize = isUncompressedUnicode() ? 2 : 1;
- int strSize = (getString().length() * charsize);
-
- byte[] strBytes = null;
- try {
- String unicodeString = getString();
- if (!isUncompressedUnicode())
- {
- strBytes = unicodeString.getBytes("ISO-8859-1");
- }
- else
- {
- strBytes = unicodeString.getBytes("UTF-16LE");
- }
- }
- catch (Exception e) {
- throw new InternalError();
- }
- if (strSize != strBytes.length)
- throw new InternalError("That shouldnt have happened!");
-
- //Check to see if the offset occurs mid string, if so then we need to add
- //the byte to start with that represents the first byte of the continue record.
- if (strSize > stats.remainingSize) {
- //OK the offset occurs half way through the string, that means that
- //we need an extra byte after the continue record ie we didnt finish
- //writing out the string the 1st time through
-
- //But hang on, how many continue records did we span? What if this is
- //a REALLY long string. We need to work this all out.
- int amountThatCantFit = strSize;
- int strPos = 0;
- while (amountThatCantFit > 0) {
- int amountWritten = Math.min(stats.remainingSize, amountThatCantFit);
- //Make sure that the amount that can't fit takes into account
- //whether we are writing double byte unicode
- if (isUncompressedUnicode()) {
- //We have the '-1' here because whether this is the first record or
- //subsequent continue records, there is always the case that the
- //number of bytes in a string on double byte boundaries is actually odd.
- if ( ( (amountWritten ) % 2) == 1)
- amountWritten--;
- }
- System.arraycopy(strBytes, strPos, data, pos, amountWritten);
- pos += amountWritten;
- strPos += amountWritten;
- stats.recordSize += amountWritten;
- stats.remainingSize -= amountWritten;
-
- //Ok lets subtract what we can write
- amountThatCantFit -= amountWritten;
-
- //Each iteration of this while loop is another continue record, unless
- //everything now fits.
- if (amountThatCantFit > 0) {
- //We know that a continue WILL be requied, but use this common method
- pos = writeContinueIfRequired(stats, amountThatCantFit, pos, data);
-
- //The first byte after a continue mid string is the extra byte to
- //indicate if this run is compressed or not.
- data[pos] = (byte) (isUncompressedUnicode() ? 0x1 : 0x0);
- pos++;
- stats.recordSize++;
- stats.remainingSize --;
- }
- }
- } else {
- if (strSize > (data.length-pos))
- System.out.println("Hmm shouldnt happen");
- //Ok the string fits nicely in the remaining size
- System.arraycopy(strBytes, 0, data, pos, strSize);
- pos += strSize;
- stats.recordSize += strSize;
- stats.remainingSize -= strSize;
- }
-
-
- if (isRichText() && (field_4_format_runs != null)) {
- int count = field_4_format_runs.size();
-
- //This will ensure that a run does not split a continue
- for (int i=0;i<count;i++) {
- pos = writeContinueIfRequired(stats, 4, pos, data);
- FormatRun r = (FormatRun)field_4_format_runs.get(i);
- LittleEndian.putShort(data, pos, r.character);
- pos += 2;
- LittleEndian.putShort(data, pos, r.fontIndex);
- pos += 2;
-
- //Each run count is four bytes
- stats.recordSize += 4;
- stats.remainingSize -=4;
+ public void serialize(ContinuableRecordOutput out) {
+ int numberOfRichTextRuns = 0;
+ int extendedDataSize = 0;
+ if (isRichText() && field_4_format_runs != null) {
+ numberOfRichTextRuns = field_4_format_runs.size();
}
- }
-
- if (isExtendedText() && (field_5_ext_rst != null)) {
- //Ok ExtRst is actually not documented, so i am going to hope
- //that we can actually continue on byte boundaries
- int ammountThatCantFit = field_5_ext_rst.length - stats.remainingSize;
- int extPos = 0;
- if (ammountThatCantFit > 0) {
- while (ammountThatCantFit > 0) {
- //So for this record we have already written
- int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
- System.arraycopy(field_5_ext_rst, extPos, data, pos, ammountWritten);
- pos += ammountWritten;
- extPos += ammountWritten;
- stats.recordSize += ammountWritten;
- stats.remainingSize -= ammountWritten;
-
- //Ok lets subtract what we can write
- ammountThatCantFit -= ammountWritten;
- if (ammountThatCantFit > 0) {
- pos = writeContinueIfRequired(stats, 1, pos, data);
- }
- }
- } else {
- //We can fit wholey in what remains.
- System.arraycopy(field_5_ext_rst, 0, data, pos, field_5_ext_rst.length);
- pos += field_5_ext_rst.length;
- stats.remainingSize -= field_5_ext_rst.length;
- stats.recordSize += field_5_ext_rst.length;
+ if (isExtendedText() && field_5_ext_rst != null) {
+ extendedDataSize = field_5_ext_rst.length;
}
- }
-
- return pos - offset;
- }
-
-
- public void setCompressedUnicode() {
- field_2_optionflags = highByte.setByte(field_2_optionflags);
- }
-
- public void setUncompressedUnicode() {
- field_2_optionflags = highByte.clearByte(field_2_optionflags);
- }
-
- private boolean isUncompressedUnicode()
- {
- return highByte.isSet(getOptionFlags());
- }
-
- /** Returns the size of this record, given the amount of record space
- * remaining, it will also include the size of writing a continue record.
- */
-
- public static class UnicodeRecordStats {
- public int recordSize;
- public int remainingSize = SSTRecord.MAX_RECORD_SIZE;
- public int lastLengthPos = -1;
- }
- public void getRecordSize(UnicodeRecordStats stats) {
- //Basic string overhead
- if (stats.remainingSize < 3) {
- //Needs a continue
- stats.recordSize += 4;
- stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
- }
- stats.recordSize += 3;
- stats.remainingSize-= 3;
-
- //Read the number of rich runs if rich text.
- if ( isRichText() )
- {
- //Run count
- if (stats.remainingSize < 2) {
- //Needs a continue
- //Reset the available space.
- stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
- //continue record overhead
- stats.recordSize+=4;
- }
-
- stats.recordSize += 2;
- stats.remainingSize -=2;
- }
- //Read the size of extended data if present.
- if ( isExtendedText() )
- {
- //Needs a continue
- //extension length
- if (stats.remainingSize < 4) {
- //Reset the available space.
- stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
- //continue record overhead
- stats.recordSize+=4;
- }
-
- stats.recordSize += 4;
- stats.remainingSize -=4;
- }
-
- int charsize = isUncompressedUnicode() ? 2 : 1;
- int strSize = (getString().length() * charsize);
- //Check to see if the offset occurs mid string, if so then we need to add
- //the byte to start with that represents the first byte of the continue record.
- if (strSize > stats.remainingSize) {
- //Ok the offset occurs half way through the string, that means that
- //we need an extra byte after the continue record ie we didnt finish
- //writing out the string the 1st time through
-
- //But hang on, how many continue records did we span? What if this is
- //a REALLY long string. We need to work this all out.
- int ammountThatCantFit = strSize;
- while (ammountThatCantFit > 0) {
- int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
- //Make sure that the ammount that cant fit takes into account
- //whether we are writing double byte unicode
- if (isUncompressedUnicode()) {
- //We have the '-1' here because whether this is the first record or
- //subsequent continue records, there is always the case that the
- //number of bytes in a string on doube byte boundaries is actually odd.
- if ( ( (ammountWritten) % 2) == 1)
- ammountWritten--;
- }
- stats.recordSize += ammountWritten;
- stats.remainingSize -= ammountWritten;
-
- //Ok lets subtract what we can write
- ammountThatCantFit -= ammountWritten;
-
- //Each iteration of this while loop is another continue record, unless
- //everything now fits.
- if (ammountThatCantFit > 0) {
- //Reset the available space.
- stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
- //continue record overhead
- stats.recordSize+=4;
-
- //The first byte after a continue mid string is the extra byte to
- //indicate if this run is compressed or not.
- stats.recordSize++;
- stats.remainingSize --;
- }
- }
- } else {
- //Ok the string fits nicely in the remaining size
- stats.recordSize += strSize;
- stats.remainingSize -= strSize;
- }
+
+ out.writeString(field_3_string, numberOfRichTextRuns, extendedDataSize);
- if (isRichText() && (field_4_format_runs != null)) {
- int count = field_4_format_runs.size();
+ if (numberOfRichTextRuns > 0) {
- //This will ensure that a run does not split a continue
- for (int i=0;i<count;i++) {
- if (stats.remainingSize < 4) {
- //Reset the available space.
- stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
- //continue record overhead
- stats.recordSize+=4;
+ //This will ensure that a run does not split a continue
+ for (int i=0;i<numberOfRichTextRuns;i++) {
+ if (out.getAvailableSpace() < 4) {
+ out.writeContinue();
+ }
+ FormatRun r = (FormatRun)field_4_format_runs.get(i);
+ r.serialize(out);
}
-
- //Each run count is four bytes
- stats.recordSize += 4;
- stats.remainingSize -=4;
}
- }
- if (isExtendedText() && (field_5_ext_rst != null)) {
- //Ok ExtRst is actually not documented, so i am going to hope
- //that we can actually continue on byte boundaries
- int ammountThatCantFit = field_5_ext_rst.length - stats.remainingSize;
- if (ammountThatCantFit > 0) {
- while (ammountThatCantFit > 0) {
- //So for this record we have already written
- int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
- stats.recordSize += ammountWritten;
- stats.remainingSize -= ammountWritten;
-
- //Ok lets subtract what we can write
- ammountThatCantFit -= ammountWritten;
- if (ammountThatCantFit > 0) {
- //Each iteration of this while loop is another continue record.
-
- //Reset the available space.
- stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
- //continue record overhead
- stats.recordSize += 4;
+ if (extendedDataSize > 0) {
+ // OK ExtRst is actually not documented, so I am going to hope
+ // that we can actually continue on byte boundaries
+
+ int extPos = 0;
+ while (true) {
+ int nBytesToWrite = Math.min(extendedDataSize - extPos, out.getAvailableSpace());
+ out.write(field_5_ext_rst, extPos, nBytesToWrite);
+ extPos += nBytesToWrite;
+ if (extPos >= extendedDataSize) {
+ break;
+ }
+ out.writeContinue();
}
- }
- } else {
- //We can fit wholey in what remains.
- stats.remainingSize -= field_5_ext_rst.length;
- stats.recordSize += field_5_ext_rst.length;
}
- }
}
public int compareTo(Object obj)
@@ -801,9 +496,9 @@ public final class UnicodeString implements Comparable {
if (result != 0)
return result;
- //Ok string appears to be equal but now lets compare formatting runs
+ //OK string appears to be equal but now lets compare formatting runs
if ((field_4_format_runs == null) && (str.field_4_format_runs == null))
- //Strings are equal, and there are no formtting runs.
+ //Strings are equal, and there are no formatting runs.
return 0;
if ((field_4_format_runs == null) && (str.field_4_format_runs != null))
@@ -850,12 +545,12 @@ public final class UnicodeString implements Comparable {
return 0;
}
- public boolean isRichText()
+ private boolean isRichText()
{
return richText.isSet(getOptionFlags());
}
- public boolean isExtendedText()
+ private boolean isExtendedText()
{
return extBit.isSet(getOptionFlags());
}
@@ -877,10 +572,8 @@ public final class UnicodeString implements Comparable {
str.field_5_ext_rst = new byte[field_5_ext_rst.length];
System.arraycopy(field_5_ext_rst, 0, str.field_5_ext_rst, 0,
field_5_ext_rst.length);
- }
+ }
return str;
}
-
-
}
diff --git a/src/java/org/apache/poi/hssf/record/constant/ConstantValueParser.java b/src/java/org/apache/poi/hssf/record/constant/ConstantValueParser.java
index 8304eb0ee3..3fbbee12ac 100755
--- a/src/java/org/apache/poi/hssf/record/constant/ConstantValueParser.java
+++ b/src/java/org/apache/poi/hssf/record/constant/ConstantValueParser.java
@@ -17,8 +17,6 @@
package org.apache.poi.hssf.record.constant;
-import org.apache.poi.hssf.record.UnicodeString;
-import org.apache.poi.hssf.record.UnicodeString.UnicodeRecordStats;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
@@ -65,7 +63,7 @@ public final class ConstantValueParser {
case TYPE_NUMBER:
return new Double(in.readDouble());
case TYPE_STRING:
- return new UnicodeString(StringUtil.readUnicodeString(in));
+ return StringUtil.readUnicodeString(in);
case TYPE_BOOLEAN:
return readBoolean(in);
case TYPE_ERROR_CODE:
@@ -111,10 +109,8 @@ public final class ConstantValueParser {
if(cls == Boolean.class || cls == Double.class || cls == ErrorConstant.class) {
return 8;
}
- UnicodeString strVal = (UnicodeString)object;
- UnicodeRecordStats urs = new UnicodeRecordStats();
- strVal.getRecordSize(urs);
- return urs.recordSize;
+ String strVal = (String)object;
+ return StringUtil.getEncodedSize(strVal);
}
public static void encode(LittleEndianOutput out, Object[] values) {
@@ -142,10 +138,10 @@ public final class ConstantValueParser {
out.writeDouble(dVal.doubleValue());
return;
}
- if (value instanceof UnicodeString) {
- UnicodeString usVal = (UnicodeString) value;
+ if (value instanceof String) {
+ String val = (String) value;
out.writeByte(TYPE_STRING);
- StringUtil.writeUnicodeString(out, usVal.getString());
+ StringUtil.writeUnicodeString(out, val);
return;
}
if (value instanceof ErrorConstant) {
diff --git a/src/java/org/apache/poi/hssf/record/cont/ContinuableRecord.java b/src/java/org/apache/poi/hssf/record/cont/ContinuableRecord.java
new file mode 100644
index 0000000000..135b93ff44
--- /dev/null
+++ b/src/java/org/apache/poi/hssf/record/cont/ContinuableRecord.java
@@ -0,0 +1,69 @@
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+package org.apache.poi.hssf.record.cont;
+
+import org.apache.poi.hssf.record.ContinueRecord;
+import org.apache.poi.hssf.record.Record;
+import org.apache.poi.util.LittleEndianByteArrayOutputStream;
+import org.apache.poi.util.LittleEndianOutput;
+
+/**
+ * Common superclass of all records that can produce {@link ContinueRecord}s while being serialized.
+ *
+ * @author Josh Micich
+ */
+public abstract class ContinuableRecord extends Record {
+
+ protected ContinuableRecord() {
+ // no fields to initialise
+ }
+ /**
+ * Serializes this record's content to the supplied data output.<br/>
+ * The standard BIFF header (ushort sid, ushort size) has been handled by the superclass, so
+ * only BIFF data should be written by this method. Simple data types can be written with the
+ * standard {@link LittleEndianOutput} methods. Methods from {@link ContinuableRecordOutput}
+ * can be used to serialize strings (with {@link ContinueRecord}s being written as required).
+ * If necessary, implementors can explicitly start {@link ContinueRecord}s (regardless of the
+ * amount of remaining space).
+ *
+ * @param out a data output stream
+ */
+ protected abstract void serialize(ContinuableRecordOutput out);
+
+
+ /**
+ * @return four less than the total length of the encoded record(s)
+ * (in the case when no {@link ContinueRecord} is needed, this is the
+ * same ushort value that gets encoded after the record sid)
+ */
+ protected final int getDataSize() {
+ ContinuableRecordOutput out = ContinuableRecordOutput.createForCountingOnly();
+ serialize(out);
+ out.terminate();
+ return out.getTotalSize() - 4;
+ }
+
+ public final int serialize(int offset, byte[] data) {
+
+ LittleEndianOutput leo = new LittleEndianByteArrayOutputStream(data, offset);
+ ContinuableRecordOutput out = new ContinuableRecordOutput(leo, getSid());
+ serialize(out);
+ out.terminate();
+ return out.getTotalSize();
+ }
+}
diff --git a/src/java/org/apache/poi/hssf/record/cont/ContinuableRecordOutput.java b/src/java/org/apache/poi/hssf/record/cont/ContinuableRecordOutput.java
new file mode 100644
index 0000000000..5d540365b7
--- /dev/null
+++ b/src/java/org/apache/poi/hssf/record/cont/ContinuableRecordOutput.java
@@ -0,0 +1,257 @@
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+package org.apache.poi.hssf.record.cont;
+
+import org.apache.poi.hssf.record.ContinueRecord;
+import org.apache.poi.util.DelayableLittleEndianOutput;
+import org.apache.poi.util.LittleEndianOutput;
+import org.apache.poi.util.StringUtil;
+
+/**
+ * An augmented {@link LittleEndianOutput} used for serialization of {@link ContinuableRecord}s.
+ * This class keeps track of how much remaining space is available in the current BIFF record and
+ * can start new {@link ContinueRecord}s as required.
+ *
+ * @author Josh Micich
+ */
+public final class ContinuableRecordOutput implements LittleEndianOutput {
+
+ private final LittleEndianOutput _out;
+ private UnknownLengthRecordOutput _ulrOutput;
+ private int _totalPreviousRecordsSize;
+
+ ContinuableRecordOutput(LittleEndianOutput out, int sid) {
+ _ulrOutput = new UnknownLengthRecordOutput(out, sid);
+ _out = out;
+ _totalPreviousRecordsSize = 0;
+ }
+
+ public static ContinuableRecordOutput createForCountingOnly() {
+ return new ContinuableRecordOutput(NOPOutput, -777); // fake sid
+ }
+
+ /**
+ * @return total number of bytes written so far (including all BIFF headers)
+ */
+ public int getTotalSize() {
+ return _totalPreviousRecordsSize + _ulrOutput.getTotalSize();
+ }
+ /**
+ * Terminates the last record (also updates its 'ushort size' field)
+ */
+ void terminate() {
+ _ulrOutput.terminate();
+ }
+ /**
+ * @return number of remaining bytes of space in current record
+ */
+ public int getAvailableSpace() {
+ return _ulrOutput.getAvailableSpace();
+ }
+
+ /**
+ * Terminates the current record and starts a new {@link ContinueRecord} (regardless
+ * of how much space is still available in the current record).
+ */
+ public void writeContinue() {
+ _ulrOutput.terminate();
+ _totalPreviousRecordsSize += _ulrOutput.getTotalSize();
+ _ulrOutput = new UnknownLengthRecordOutput(_out, ContinueRecord.sid);
+ }
+ public void writeContinueIfRequired(int requiredContinuousSize) {
+ if (_ulrOutput.getAvailableSpace() < requiredContinuousSize) {
+ writeContinue();
+ }
+ }
+
+ /**
+ * Writes the 'optionFlags' byte and encoded character data of a unicode string. This includes:
+ * <ul>
+ * <li>byte optionFlags</li>
+ * <li>encoded character data (in "ISO-8859-1" or "UTF-16LE" encoding)</li>
+ * </ul>
+ *
+ * Notes:
+ * <ul>
+ * <li>The value of the 'is16bitEncoded' flag is determined by the actual character data
+ * of <tt>text</tt></li>
+ * <li>The string options flag is never separated (by a {@link ContinueRecord}) from the
+ * first chunk of character data it refers to.</li>
+ * <li>The 'ushort length' field is assumed to have been explicitly written earlier. Hence,
+ * there may be an intervening {@link ContinueRecord}</li>
+ * </ul>
+ */
+ public void writeStringData(String text) {
+ boolean is16bitEncoded = StringUtil.hasMultibyte(text);
+ // calculate total size of the header and first encoded char
+ int keepTogetherSize = 1 + 1; // byte optionFlags, at least one character byte
+ int optionFlags = 0x00;
+ if (is16bitEncoded) {
+ optionFlags |= 0x01;
+ keepTogetherSize += 1; // one extra byte for first char
+ }
+ writeContinueIfRequired(keepTogetherSize);
+ writeByte(optionFlags);
+ writeCharacterData(text, is16bitEncoded);
+ }
+ /**
+ * Writes a unicode string complete with header and character data. This includes:
+ * <ul>
+ * <li>ushort length</li>
+ * <li>byte optionFlags</li>
+ * <li>ushort numberOfRichTextRuns (optional)</li>
+ * <li>ushort extendedDataSize (optional)</li>
+ * <li>encoded character data (in "ISO-8859-1" or "UTF-16LE" encoding)</li>
+ * </ul>
+ *
+ * The following bits of the 'optionFlags' byte will be set as appropriate:
+ * <table border='1'>
+ * <tr><th>Mask</th><th>Description</th></tr>
+ * <tr><td>0x01</td><td>is16bitEncoded</td></tr>
+ * <tr><td>0x04</td><td>hasExtendedData</td></tr>
+ * <tr><td>0x08</td><td>isRichText</td></tr>
+ * </table>
+ * Notes:
+ * <ul>
+ * <li>The value of the 'is16bitEncoded' flag is determined by the actual character data
+ * of <tt>text</tt></li>
+ * <li>The string header fields are never separated (by a {@link ContinueRecord}) from the
+ * first chunk of character data (i.e. the first character is always encoded in the same
+ * record as the string header).</li>
+ * </ul>
+ */
+ public void writeString(String text, int numberOfRichTextRuns, int extendedDataSize) {
+ boolean is16bitEncoded = StringUtil.hasMultibyte(text);
+ // calculate total size of the header and first encoded char
+ int keepTogetherSize = 2 + 1 + 1; // ushort len, byte optionFlags, at least one character byte
+ int optionFlags = 0x00;
+ if (is16bitEncoded) {
+ optionFlags |= 0x01;
+ keepTogetherSize += 1; // one extra byte for first char
+ }
+ if (numberOfRichTextRuns > 0) {
+ optionFlags |= 0x08;
+ keepTogetherSize += 2;
+ }
+ if (extendedDataSize > 0) {
+ optionFlags |= 0x04;
+ keepTogetherSize += 4;
+ }
+ writeContinueIfRequired(keepTogetherSize);
+ writeShort(text.length());
+ writeByte(optionFlags);
+ if (numberOfRichTextRuns > 0) {
+ writeShort(numberOfRichTextRuns);
+ }
+ if (extendedDataSize > 0) {
+ writeInt(extendedDataSize);
+ }
+ writeCharacterData(text, is16bitEncoded);
+ }
+
+
+ private void writeCharacterData(String text, boolean is16bitEncoded) {
+ int nChars = text.length();
+ int i=0;
+ if (is16bitEncoded) {
+ while(true) {
+ int nWritableChars = Math.min(nChars-i, _ulrOutput.getAvailableSpace() / 2);
+ for ( ; nWritableChars > 0; nWritableChars--) {
+ _ulrOutput.writeShort(text.charAt(i++));
+ }
+ if (i >= nChars) {
+ break;
+ }
+ writeContinue();
+ writeByte(0x01);
+ }
+ } else {
+ while(true) {
+ int nWritableChars = Math.min(nChars-i, _ulrOutput.getAvailableSpace() / 1);
+ for ( ; nWritableChars > 0; nWritableChars--) {
+ _ulrOutput.writeByte(text.charAt(i++));
+ }
+ if (i >= nChars) {
+ break;
+ }
+ writeContinue();
+ writeByte(0x00);
+ }
+ }
+ }
+
+ public void write(byte[] b) {
+ writeContinueIfRequired(b.length);
+ _ulrOutput.write(b);
+ }
+ public void write(byte[] b, int offset, int len) {
+ writeContinueIfRequired(len);
+ _ulrOutput.write(b, offset, len);
+ }
+ public void writeByte(int v) {
+ writeContinueIfRequired(1);
+ _ulrOutput.writeByte(v);
+ }
+ public void writeDouble(double v) {
+ writeContinueIfRequired(8);
+ _ulrOutput.writeDouble(v);
+ }
+ public void writeInt(int v) {
+ writeContinueIfRequired(4);
+ _ulrOutput.writeInt(v);
+ }
+ public void writeLong(long v) {
+ writeContinueIfRequired(8);
+ _ulrOutput.writeLong(v);
+ }
+ public void writeShort(int v) {
+ writeContinueIfRequired(2);
+ _ulrOutput.writeShort(v);
+ }
+
+ /**
+ * Allows optimised usage of {@link ContinuableRecordOutput} for sizing purposes only.
+ */
+ private static final LittleEndianOutput NOPOutput = new DelayableLittleEndianOutput() {
+
+ public LittleEndianOutput createDelayedOutput(int size) {
+ return this;
+ }
+ public void write(byte[] b) {
+ // does nothing
+ }
+ public void write(byte[] b, int offset, int len) {
+ // does nothing
+ }
+ public void writeByte(int v) {
+ // does nothing
+ }
+ public void writeDouble(double v) {
+ // does nothing
+ }
+ public void writeInt(int v) {
+ // does nothing
+ }
+ public void writeLong(long v) {
+ // does nothing
+ }
+ public void writeShort(int v) {
+ // does nothing
+ }
+ };
+}
diff --git a/src/java/org/apache/poi/hssf/record/cont/UnknownLengthRecordOutput.java b/src/java/org/apache/poi/hssf/record/cont/UnknownLengthRecordOutput.java
new file mode 100644
index 0000000000..9209566c23
--- /dev/null
+++ b/src/java/org/apache/poi/hssf/record/cont/UnknownLengthRecordOutput.java
@@ -0,0 +1,114 @@
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+package org.apache.poi.hssf.record.cont;
+
+import org.apache.poi.hssf.record.RecordInputStream;
+import org.apache.poi.util.DelayableLittleEndianOutput;
+import org.apache.poi.util.LittleEndianByteArrayOutputStream;
+import org.apache.poi.util.LittleEndianOutput;
+/**
+ * Allows the writing of BIFF records when the 'ushort size' header field is not known in advance.
+ * When the client is finished writing data, it calls {@link #terminate()}, at which point this
+ * class updates the 'ushort size' with its final value.
+ *
+ * @author Josh Micich
+ */
+final class UnknownLengthRecordOutput implements LittleEndianOutput {
+ private static final int MAX_DATA_SIZE = RecordInputStream.MAX_RECORD_DATA_SIZE;
+
+ private final LittleEndianOutput _originalOut;
+ /** for writing the 'ushort size' field once its value is known */
+ private final LittleEndianOutput _dataSizeOutput;
+ private final byte[] _byteBuffer;
+ private LittleEndianOutput _out;
+ private int _size;
+
+ public UnknownLengthRecordOutput(LittleEndianOutput out, int sid) {
+ _originalOut = out;
+ out.writeShort(sid);
+ if (out instanceof DelayableLittleEndianOutput) {
+ // optimisation
+ DelayableLittleEndianOutput dleo = (DelayableLittleEndianOutput) out;
+ _dataSizeOutput = dleo.createDelayedOutput(2);
+ _byteBuffer = null;
+ _out = out;
+ } else {
+ // otherwise temporarily write all subsequent data to a buffer
+ _dataSizeOutput = out;
+ _byteBuffer = new byte[RecordInputStream.MAX_RECORD_DATA_SIZE];
+ _out = new LittleEndianByteArrayOutputStream(_byteBuffer, 0);
+ }
+ }
+ /**
+ * includes 4 byte header
+ */
+ public int getTotalSize() {
+ return 4 + _size;
+ }
+ public int getAvailableSpace() {
+ if (_out == null) {
+ throw new IllegalStateException("Record already terminated");
+ }
+ return MAX_DATA_SIZE - _size;
+ }
+ /**
+ * Finishes writing the current record and updates 'ushort size' field.<br/>
+ * After this method is called, only {@link #getTotalSize()} may be called.
+ */
+ public void terminate() {
+ if (_out == null) {
+ throw new IllegalStateException("Record already terminated");
+ }
+ _dataSizeOutput.writeShort(_size);
+ if (_byteBuffer != null) {
+ _originalOut.write(_byteBuffer, 0, _size);
+ _out = null;
+ return;
+ }
+ _out = null;
+ }
+
+ public void write(byte[] b) {
+ _out.write(b);
+ _size += b.length;
+ }
+ public void write(byte[] b, int offset, int len) {
+ _out.write(b, offset, len);
+ _size += len;
+ }
+ public void writeByte(int v) {
+ _out.writeByte(v);
+ _size += 1;
+ }
+ public void writeDouble(double v) {
+ _out.writeDouble(v);
+ _size += 8;
+ }
+ public void writeInt(int v) {
+ _out.writeInt(v);
+ _size += 4;
+ }
+ public void writeLong(long v) {
+ _out.writeLong(v);
+ _size += 8;
+ }
+ public void writeShort(int v) {
+ _out.writeShort(v);
+ _size += 2;
+ }
+}
diff --git a/src/java/org/apache/poi/hssf/record/formula/ArrayPtg.java b/src/java/org/apache/poi/hssf/record/formula/ArrayPtg.java
index 3fcc23eda0..7d4f65acd9 100644
--- a/src/java/org/apache/poi/hssf/record/formula/ArrayPtg.java
+++ b/src/java/org/apache/poi/hssf/record/formula/ArrayPtg.java
@@ -206,8 +206,8 @@ public final class ArrayPtg extends Ptg {
if (o == null) {
throw new RuntimeException("Array item cannot be null");
}
- if (o instanceof UnicodeString) {
- return "\"" + ((UnicodeString)o).getString() + "\"";
+ if (o instanceof String) {
+ return "\"" + (String)o + "\"";
}
if (o instanceof Double) {
return ((Double)o).toString();
diff --git a/src/java/org/apache/poi/hssf/usermodel/HSSFCell.java b/src/java/org/apache/poi/hssf/usermodel/HSSFCell.java
index 5707356221..6f327b21c3 100644
--- a/src/java/org/apache/poi/hssf/usermodel/HSSFCell.java
+++ b/src/java/org/apache/poi/hssf/usermodel/HSSFCell.java
@@ -43,7 +43,6 @@ import org.apache.poi.hssf.record.NumberRecord;
import org.apache.poi.hssf.record.ObjRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordBase;
-import org.apache.poi.hssf.record.StringRecord;
import org.apache.poi.hssf.record.SubRecord;
import org.apache.poi.hssf.record.TextObjectRecord;
import org.apache.poi.hssf.record.UnicodeString;
@@ -257,7 +256,7 @@ public class HSSFCell implements Cell {
}
public int getColumnIndex() {
- return record.getColumn() & 0xFFFF;
+ return record.getColumn() & 0xFFFF;
}
/**
@@ -336,38 +335,23 @@ public class HSSFCell implements Cell {
break;
case CELL_TYPE_STRING :
- LabelSSTRecord lrec = null;
+ LabelSSTRecord lrec;
- if (cellType != this.cellType)
- {
+ if (cellType == this.cellType) {
+ lrec = (LabelSSTRecord) record;
+ } else {
lrec = new LabelSSTRecord();
+ lrec.setColumn(col);
+ lrec.setRow(row);
+ lrec.setXFIndex(styleIndex);
}
- else
- {
- lrec = ( LabelSSTRecord ) record;
- }
- lrec.setColumn(col);
- lrec.setRow(row);
- lrec.setXFIndex(styleIndex);
- if (setValue)
- {
- if ((getStringCellValue() != null)
- && (!getStringCellValue().equals("")))
- {
- int sst = 0;
-
- UnicodeString str = getRichStringCellValue().getUnicodeString();
-//jmh if (encoding == ENCODING_COMPRESSED_UNICODE)
-//jmh {
-// jmh str.setCompressedUnicode();
-// jmh } else if (encoding == ENCODING_UTF_16)
-// jmh {
-// jmh str.setUncompressedUnicode();
-// jmh }
- sst = book.getWorkbook().addSSTString(str);
- lrec.setSSTIndex(sst);
- getRichStringCellValue().setUnicodeString(book.getWorkbook().getSSTString(sst));
- }
+ if (setValue) {
+ String str = convertCellValueToString();
+ int sstIndex = book.getWorkbook().addSSTString(new UnicodeString(str));
+ lrec.setSSTIndex(sstIndex);
+ UnicodeString us = book.getWorkbook().getSSTString(sstIndex);
+ stringValue = new HSSFRichTextString();
+ stringValue.setUnicodeString(us);
}
record = lrec;
break;
@@ -782,7 +766,9 @@ public class HSSFCell implements Cell {
case CELL_TYPE_BOOLEAN:
return (( BoolErrRecord ) record).getBooleanValue();
case CELL_TYPE_STRING:
- return Boolean.valueOf(((StringRecord)record).getString()).booleanValue();
+ int sstIndex = ((LabelSSTRecord)record).getSSTIndex();
+ String text = book.getWorkbook().getSSTString(sstIndex).getString();
+ return Boolean.valueOf(text).booleanValue();
case CELL_TYPE_NUMERIC:
return ((NumberRecord)record).getValue() != 0;
@@ -796,6 +782,26 @@ public class HSSFCell implements Cell {
}
throw new RuntimeException("Unexpected cell type (" + cellType + ")");
}
+ private String convertCellValueToString() {
+
+ switch (cellType) {
+ case CELL_TYPE_BLANK:
+ return "";
+ case CELL_TYPE_BOOLEAN:
+ return ((BoolErrRecord) record).getBooleanValue() ? "TRUE" : "FALSE";
+ case CELL_TYPE_STRING:
+ int sstIndex = ((LabelSSTRecord)record).getSSTIndex();
+ return book.getWorkbook().getSSTString(sstIndex).getString();
+ case CELL_TYPE_NUMERIC:
+ return String.valueOf(((NumberRecord)record).getValue());
+ case CELL_TYPE_ERROR:
+ return HSSFErrorConstants.getText(((BoolErrRecord) record).getErrorValue());
+ case CELL_TYPE_FORMULA:
+ // should really evaluate, but HSSFCell can't call HSSFFormulaEvaluator
+ return "";
+ }
+ throw new RuntimeException("Unexpected cell type (" + cellType + ")");
+ }
/**
* get the value of the cell as a boolean. For strings, numbers, and errors, we throw an exception.
diff --git a/src/java/org/apache/poi/util/DelayableLittleEndianOutput.java b/src/java/org/apache/poi/util/DelayableLittleEndianOutput.java
new file mode 100644
index 0000000000..d8e4395e64
--- /dev/null
+++ b/src/java/org/apache/poi/util/DelayableLittleEndianOutput.java
@@ -0,0 +1,34 @@
+/* ====================================================================
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+==================================================================== */
+
+package org.apache.poi.util;
+/**
+ * Implementors of this interface allow client code to 'delay' writing to a certain section of a
+ * data output stream.<br/>
+ * A typical application is for writing BIFF records when the size is not known until well after
+ * the header has been written. The client code can call {@link #createDelayedOutput(int)}
+ * to reserve two bytes of the output for the 'ushort size' header field. The delayed output can
+ * be written at any stage.
+ *
+ * @author Josh Micich
+ */
+public interface DelayableLittleEndianOutput extends LittleEndianOutput {
+ /**
+ * Creates an output stream intended for outputting a sequence of <tt>size</tt> bytes.
+ */
+ LittleEndianOutput createDelayedOutput(int size);
+}
diff --git a/src/java/org/apache/poi/util/LittleEndianByteArrayOutputStream.java b/src/java/org/apache/poi/util/LittleEndianByteArrayOutputStream.java
index 1b68a348be..b3ded97687 100644
--- a/src/java/org/apache/poi/util/LittleEndianByteArrayOutputStream.java
+++ b/src/java/org/apache/poi/util/LittleEndianByteArrayOutputStream.java
@@ -24,7 +24,7 @@ package org.apache.poi.util;
*
* @author Josh Micich
*/
-public final class LittleEndianByteArrayOutputStream implements LittleEndianOutput {
+public final class LittleEndianByteArrayOutputStream implements LittleEndianOutput, DelayableLittleEndianOutput {
private final byte[] _buf;
private final int _endIndex;
private int _writeIndex;
@@ -89,4 +89,10 @@ public final class LittleEndianByteArrayOutputStream implements LittleEndianOutp
public int getWriteIndex() {
return _writeIndex;
}
+ public LittleEndianOutput createDelayedOutput(int size) {
+ checkPosition(size);
+ LittleEndianOutput result = new LittleEndianByteArrayOutputStream(_buf, _writeIndex, _writeIndex+size);
+ _writeIndex += size;
+ return result;
+ }
}
diff --git a/src/java/org/apache/poi/util/StringUtil.java b/src/java/org/apache/poi/util/StringUtil.java
index 8a57d2340b..263f45e1e5 100644
--- a/src/java/org/apache/poi/util/StringUtil.java
+++ b/src/java/org/apache/poi/util/StringUtil.java
@@ -163,6 +163,15 @@ public class StringUtil {
}
/**
+ * @return the number of bytes that would be written by {@link #writeUnicodeString(LittleEndianOutput, String)}
+ */
+ public static int getEncodedSize(String value) {
+ int result = 2 + 1;
+ result += value.length() * (StringUtil.hasMultibyte(value) ? 2 : 1);
+ return result;
+ }
+
+ /**
* Takes a unicode (java) string, and returns it as 8 bit data (in ISO-8859-1
* codepage).
* (In Excel terms, write compressed 8 bit unicode)