Bläddra i källkod

Merged revisions 709570,709598,710114,710134,710136,711505,711513-711515,711694,711739,711741,711746,711749 via svnmerge from

https://svn.apache.org/repos/asf/poi/trunk

........
  r709570 | josh | 2008-10-31 14:17:08 -0700 (Fri, 31 Oct 2008) | 1 line
  
  made DrawingSelectionRecord into a plain BIFF record (not an escher holder aggregate). Added some interpretation of fields
........
  r709598 | josh | 2008-10-31 16:24:41 -0700 (Fri, 31 Oct 2008) | 1 line
  
  Simplified multiple record expansion logic
........
  r710114 | yegor | 2008-11-03 09:54:01 -0800 (Mon, 03 Nov 2008) | 1 line
  
  fixed #46122: Picture#getEscherBSERecord threw NullPointerException if EscherContainerRecord.BSTORE_CONTAINER was not found
........
  r710134 | yegor | 2008-11-03 11:19:39 -0800 (Mon, 03 Nov 2008) | 1 line
  
  fixed bug #46033: table cells had incorrect text type resulting in corrupted style info
........
  r710136 | yegor | 2008-11-03 11:23:52 -0800 (Mon, 03 Nov 2008) | 1 line
  
  updated status of the fixed bug #46033
........
  r711505 | josh | 2008-11-04 19:50:31 -0800 (Tue, 04 Nov 2008) | 1 line
  
  Refactored test case
........
  r711513 | josh | 2008-11-04 21:45:17 -0800 (Tue, 04 Nov 2008) | 1 line
  
  Converted ConstantValueParser to use plain Strings instead of UnicodeStrings
........
  r711514 | josh | 2008-11-04 21:52:35 -0800 (Tue, 04 Nov 2008) | 1 line
  
  Converted SupBookRecord to use plain Strings instead of UnicodeStrings
........
  r711515 | josh | 2008-11-04 22:15:59 -0800 (Tue, 04 Nov 2008) | 1 line
  
  Refactored test case
........
  r711694 | josh | 2008-11-05 12:46:00 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Fixed bug in conversion to/from text cells
........
  r711739 | josh | 2008-11-05 15:28:55 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Refactoring test case
........
  r711741 | josh | 2008-11-05 15:35:02 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Refactoring test case
........
  r711746 | josh | 2008-11-05 15:45:42 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Fixed mistake in test case.  Constant value was 4 bytes too large (should be max *data* size not max *record* size).
........
  r711749 | josh | 2008-11-05 17:12:41 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Introduced ContinuableRecord to help fix serialization of StringRecords with large data.  Fixed TextObjectRecord to only write 16bit unicode when needed.  Simplification in UnicodeString.
........


git-svn-id: https://svn.apache.org/repos/asf/poi/branches/ooxml@711755 13f79535-47bb-0310-9956-ffa450edef68
tags/ooxml_20081107
Josh Micich 15 år sedan
förälder
incheckning
2963774c37
37 ändrade filer med 1398 tillägg och 1360 borttagningar
  1. 2
    0
      src/documentation/content/xdocs/changes.xml
  2. 2
    0
      src/documentation/content/xdocs/status.xml
  3. 6
    5
      src/java/org/apache/poi/hssf/model/LinkTable.java
  4. 125
    21
      src/java/org/apache/poi/hssf/record/DrawingSelectionRecord.java
  5. 85
    56
      src/java/org/apache/poi/hssf/record/RecordFactory.java
  6. 0
    10
      src/java/org/apache/poi/hssf/record/RecordInputStream.java
  7. 15
    74
      src/java/org/apache/poi/hssf/record/SSTRecord.java
  8. 0
    76
      src/java/org/apache/poi/hssf/record/SSTRecordHeader.java
  9. 0
    51
      src/java/org/apache/poi/hssf/record/SSTRecordSizeCalculator.java
  10. 19
    42
      src/java/org/apache/poi/hssf/record/SSTSerializer.java
  11. 29
    89
      src/java/org/apache/poi/hssf/record/StringRecord.java
  12. 47
    53
      src/java/org/apache/poi/hssf/record/SupBookRecord.java
  13. 19
    109
      src/java/org/apache/poi/hssf/record/TextObjectRecord.java
  14. 107
    414
      src/java/org/apache/poi/hssf/record/UnicodeString.java
  15. 6
    10
      src/java/org/apache/poi/hssf/record/constant/ConstantValueParser.java
  16. 69
    0
      src/java/org/apache/poi/hssf/record/cont/ContinuableRecord.java
  17. 257
    0
      src/java/org/apache/poi/hssf/record/cont/ContinuableRecordOutput.java
  18. 114
    0
      src/java/org/apache/poi/hssf/record/cont/UnknownLengthRecordOutput.java
  19. 2
    2
      src/java/org/apache/poi/hssf/record/formula/ArrayPtg.java
  20. 38
    32
      src/java/org/apache/poi/hssf/usermodel/HSSFCell.java
  21. 34
    0
      src/java/org/apache/poi/util/DelayableLittleEndianOutput.java
  22. 7
    1
      src/java/org/apache/poi/util/LittleEndianByteArrayOutputStream.java
  23. 9
    0
      src/java/org/apache/poi/util/StringUtil.java
  24. 6
    2
      src/scratchpad/src/org/apache/poi/hslf/model/Picture.java
  25. 2
    2
      src/scratchpad/src/org/apache/poi/hslf/model/TableCell.java
  26. 23
    0
      src/scratchpad/testcases/org/apache/poi/hslf/model/TestPicture.java
  27. 5
    0
      src/scratchpad/testcases/org/apache/poi/hslf/model/TestTable.java
  28. 23
    26
      src/testcases/org/apache/poi/hssf/record/TestRecordFactory.java
  29. 91
    119
      src/testcases/org/apache/poi/hssf/record/TestSSTRecordSizeCalculator.java
  30. 68
    25
      src/testcases/org/apache/poi/hssf/record/TestStringRecord.java
  31. 5
    8
      src/testcases/org/apache/poi/hssf/record/TestSupBookRecord.java
  32. 4
    4
      src/testcases/org/apache/poi/hssf/record/TestTextObjectBaseRecord.java
  33. 32
    34
      src/testcases/org/apache/poi/hssf/record/TestTextObjectRecord.java
  34. 65
    53
      src/testcases/org/apache/poi/hssf/record/TestUnicodeString.java
  35. 1
    2
      src/testcases/org/apache/poi/hssf/record/constant/TestConstantValueParser.java
  36. 2
    2
      src/testcases/org/apache/poi/hssf/record/formula/TestArrayPtg.java
  37. 79
    38
      src/testcases/org/apache/poi/hssf/usermodel/TestHSSFCell.java

+ 2
- 0
src/documentation/content/xdocs/changes.xml Visa fil

@@ -37,6 +37,8 @@

<!-- Don't forget to update status.xml too! -->
<release version="3.5-beta4" date="2008-??-??">
<action dev="POI-DEVELOPERS" type="fix">46033 - fixed TableCell to correctly set text type</action>
<action dev="POI-DEVELOPERS" type="fix">46122 - fixed Picture.draw to skip rendering if picture data was not found</action>
<action dev="POI-DEVELOPERS" type="fix">15716 - memory usage optimisation - converted Ptg arrays into Formula objects</action>
<action dev="POI-DEVELOPERS" type="add">46065 - added implementation for VALUE function</action>
<action dev="POI-DEVELOPERS" type="add">45966 - added implementation for FIND function</action>

+ 2
- 0
src/documentation/content/xdocs/status.xml Visa fil

@@ -34,6 +34,8 @@
<!-- Don't forget to update changes.xml too! -->
<changes>
<release version="3.5-beta4" date="2008-??-??">
<action dev="POI-DEVELOPERS" type="fix">46033 - fixed TableCell to correctly set text type</action>
<action dev="POI-DEVELOPERS" type="fix">46122 - fixed Picture.draw to skip rendering if picture data was not found</action>
<action dev="POI-DEVELOPERS" type="fix">15716 - memory usage optimisation - converted Ptg arrays into Formula objects</action>
<action dev="POI-DEVELOPERS" type="add">46065 - added implementation for VALUE function</action>
<action dev="POI-DEVELOPERS" type="add">45966 - added implementation for FIND function</action>

+ 6
- 5
src/java/org/apache/poi/hssf/model/LinkTable.java Visa fil

@@ -29,8 +29,9 @@ import org.apache.poi.hssf.record.ExternalNameRecord;
import org.apache.poi.hssf.record.NameRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.SupBookRecord;
import org.apache.poi.hssf.record.UnicodeString;
import org.apache.poi.hssf.record.formula.Area3DPtg;
import org.apache.poi.hssf.record.formula.NameXPtg;
import org.apache.poi.hssf.record.formula.Ref3DPtg;

/**
* Link Table (OOO pdf reference: 4.10.3 ) <p/>
@@ -311,10 +312,10 @@ final class LinkTable {
return null;
}
int shIx = _externSheetRecord.getFirstSheetIndexFromRefIndex(extRefIndex);
UnicodeString usSheetName = ebr.getSheetNames()[shIx];
String usSheetName = ebr.getSheetNames()[shIx];
return new String[] {
ebr.getURL(),
usSheetName.getString(),
usSheetName,
};
}

@@ -345,9 +346,9 @@ final class LinkTable {
return result;
}

private static int getSheetIndex(UnicodeString[] sheetNames, String sheetName) {
private static int getSheetIndex(String[] sheetNames, String sheetName) {
for (int i = 0; i < sheetNames.length; i++) {
if (sheetNames[i].getString().equals(sheetName)) {
if (sheetNames[i].equals(sheetName)) {
return i;
}

+ 125
- 21
src/java/org/apache/poi/hssf/record/DrawingSelectionRecord.java Visa fil

@@ -17,25 +17,129 @@

package org.apache.poi.hssf.record;

public final class DrawingSelectionRecord extends AbstractEscherHolderRecord {
public static final short sid = 0xED;

public DrawingSelectionRecord()
{
}

public DrawingSelectionRecord( RecordInputStream in )
{
super( in );
}

protected String getRecordName()
{
return "MSODRAWINGSELECTION";
}

public short getSid()
{
return sid;
}
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;

/**
* MsoDrawingSelection (0x00ED)<p/>
* Reference:
* [MS-OGRAPH].pdf sec 2.4.69
*
* @author Josh Micich
*/
public final class DrawingSelectionRecord extends Record {
public static final short sid = 0x00ED;

/**
* From [MS-ODRAW].pdf sec 2.2.1<br/>
* TODO - make EscherRecordHeader {@link LittleEndianInput} aware and refactor with this
*/
private static final class OfficeArtRecordHeader {
public static final int ENCODED_SIZE = 8;
/**
* lower 4 bits is 'version' usually 0x01 or 0x0F (for containers)<br/>
* upper 12 bits is 'instance'
*/
private final int _verAndInstance;
/** value should be between 0xF000 and 0xFFFF */
private final int _type;
private final int _length;

public OfficeArtRecordHeader(LittleEndianInput in) {
_verAndInstance = in.readUShort();
_type = in.readUShort();
_length = in.readInt();
}

public void serialize(LittleEndianOutput out) {
out.writeShort(_verAndInstance);
out.writeShort(_type);
out.writeInt(_length);
}

public String debugFormatAsString() {
StringBuffer sb = new StringBuffer(32);
sb.append("ver+inst=").append(HexDump.shortToHex(_verAndInstance));
sb.append(" type=").append(HexDump.shortToHex(_type));
sb.append(" len=").append(HexDump.intToHex(_length));
return sb.toString();
}
}

// [MS-OGRAPH].pdf says that the data of this record is an OfficeArtFDGSL structure
// as described in[MS-ODRAW].pdf sec 2.2.33
private OfficeArtRecordHeader _header;
private int _cpsp;
/** a MSODGSLK enum value for the current selection mode */
private int _dgslk;
private int _spidFocus;
/** selected shape IDs (e.g. from EscherSpRecord.ShapeId) */
private int[] _shapeIds;

public DrawingSelectionRecord(RecordInputStream in) {
_header = new OfficeArtRecordHeader(in);
_cpsp = in.readInt();
_dgslk = in.readInt();
_spidFocus = in.readInt();
int nShapes = in.available() / 4;
int[] shapeIds = new int[nShapes];
for (int i = 0; i < nShapes; i++) {
shapeIds[i] = in.readInt();
}
_shapeIds = shapeIds;
}

public short getSid() {
return sid;
}

protected int getDataSize() {
return OfficeArtRecordHeader.ENCODED_SIZE
+ 12 // 3 int fields
+ _shapeIds.length * 4;
}

public int serialize(int offset, byte[] data) {
int dataSize = getDataSize();
int recSize = 4 + dataSize;
LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recSize);
out.writeShort(sid);
out.writeShort(dataSize);
_header.serialize(out);
out.writeInt(_cpsp);
out.writeInt(_dgslk);
out.writeInt(_spidFocus);
for (int i = 0; i < _shapeIds.length; i++) {
out.writeInt(_shapeIds[i]);
}
return recSize;
}

public Object clone() {
// currently immutable
return this;
}

public String toString() {
StringBuffer sb = new StringBuffer();

sb.append("[MSODRAWINGSELECTION]\n");
sb.append(" .rh =(").append(_header.debugFormatAsString()).append(")\n");
sb.append(" .cpsp =").append(HexDump.intToHex(_cpsp)).append('\n');
sb.append(" .dgslk =").append(HexDump.intToHex(_dgslk)).append('\n');
sb.append(" .spidFocus=").append(HexDump.intToHex(_spidFocus)).append('\n');
sb.append(" .shapeIds =(");
for (int i = 0; i < _shapeIds.length; i++) {
if (i > 0) {
sb.append(", ");
}
sb.append(HexDump.intToHex(_shapeIds[i]));
}
sb.append(")\n");

sb.append("[/MSODRAWINGSELECTION]\n");
return sb.toString();
}
}

+ 85
- 56
src/java/org/apache/poi/hssf/record/RecordFactory.java Visa fil

@@ -49,7 +49,7 @@ public final class RecordFactory {
* contains the classes for all the records we want to parse.<br/>
* Note - this lists most but not *every* subclass of Record.
*/
private static final Class[] records = {
private static final Class[] recordClasses = {
ArrayRecord.class,
BackupRecord.class,
BlankRecord.class,
@@ -163,7 +163,7 @@ public final class RecordFactory {
/**
* cache of the recordsToMap();
*/
private static Map recordsMap = recordsToMap(records);
private static Map recordsMap = recordsToMap(recordClasses);

private static short[] _allKnownRecordSIDs;
@@ -172,16 +172,33 @@ public final class RecordFactory {
* are returned digested into the non-mul form.
*/
public static Record [] createRecord(RecordInputStream in) {
Record record = createSingleRecord(in);
if (record instanceof DBCellRecord) {
// Not needed by POI. Regenerated from scratch by POI when spreadsheet is written
return new Record[] { null, };
}
if (record instanceof RKRecord) {
return new Record[] { convertToNumberRecord((RKRecord) record), };
}
if (record instanceof MulRKRecord) {
return convertRKRecords((MulRKRecord)record);
}
if (record instanceof MulBlankRecord) {
return convertMulBlankRecords((MulBlankRecord)record);
}
return new Record[] { record, };
}
private static Record createSingleRecord(RecordInputStream in) {
Constructor constructor = (Constructor) recordsMap.get(new Short(in.getSid()));

if (constructor == null) {
return new Record[] { new UnknownRecord(in), };
return new UnknownRecord(in);
}
Record retval;

try {
retval = ( Record ) constructor.newInstance(new Object[] { in });
return (Record) constructor.newInstance(new Object[] { in });
} catch (InvocationTargetException e) {
throw new RecordFormatException("Unable to construct record instance" , e.getTargetException());
} catch (IllegalArgumentException e) {
@@ -191,54 +208,55 @@ public final class RecordFactory {
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
if (retval instanceof RKRecord) {
// RK record is a slightly smaller alternative to NumberRecord
// POI likes NumberRecord better
RKRecord rk = ( RKRecord ) retval;
NumberRecord num = new NumberRecord();
}

num.setColumn(rk.getColumn());
num.setRow(rk.getRow());
num.setXFIndex(rk.getXFIndex());
num.setValue(rk.getRKNumber());
return new Record[] { num, };
}
if (retval instanceof DBCellRecord) {
// Not needed by POI. Regenerated from scratch by POI when spreadsheet is written
return new Record[] { null, };
}
// expand multiple records where necessary
if (retval instanceof MulRKRecord) {
MulRKRecord mrk = ( MulRKRecord ) retval;
/**
* RK record is a slightly smaller alternative to NumberRecord
* POI likes NumberRecord better
*/
private static NumberRecord convertToNumberRecord(RKRecord rk) {
NumberRecord num = new NumberRecord();
num.setColumn(rk.getColumn());
num.setRow(rk.getRow());
num.setXFIndex(rk.getXFIndex());
num.setValue(rk.getRKNumber());
return num;
}

Record[] mulRecs = new Record[ mrk.getNumColumns() ];
for (int k = 0; k < mrk.getNumColumns(); k++) {
NumberRecord nr = new NumberRecord();
/**
* Converts a {@link MulRKRecord} into an equivalent array of {@link NumberRecord}s
*/
private static NumberRecord[] convertRKRecords(MulRKRecord mrk) {

nr.setColumn(( short ) (k + mrk.getFirstColumn()));
nr.setRow(mrk.getRow());
nr.setXFIndex(mrk.getXFAt(k));
nr.setValue(mrk.getRKNumberAt(k));
mulRecs[ k ] = nr;
}
return mulRecs;
NumberRecord[] mulRecs = new NumberRecord[mrk.getNumColumns()];
for (int k = 0; k < mrk.getNumColumns(); k++) {
NumberRecord nr = new NumberRecord();

nr.setColumn((short) (k + mrk.getFirstColumn()));
nr.setRow(mrk.getRow());
nr.setXFIndex(mrk.getXFAt(k));
nr.setValue(mrk.getRKNumberAt(k));
mulRecs[k] = nr;
}
if (retval instanceof MulBlankRecord) {
MulBlankRecord mb = ( MulBlankRecord ) retval;
return mulRecs;
}

Record[] mulRecs = new Record[ mb.getNumColumns() ];
for (int k = 0; k < mb.getNumColumns(); k++) {
BlankRecord br = new BlankRecord();
/**
* Converts a {@link MulBlankRecord} into an equivalent array of {@link BlankRecord}s
*/
private static BlankRecord[] convertMulBlankRecords(MulBlankRecord mb) {

br.setColumn(( short ) (k + mb.getFirstColumn()));
br.setRow(mb.getRow());
br.setXFIndex(mb.getXFAt(k));
mulRecs[ k ] = br;
}
return mulRecs;
BlankRecord[] mulRecs = new BlankRecord[mb.getNumColumns()];
for (int k = 0; k < mb.getNumColumns(); k++) {
BlankRecord br = new BlankRecord();

br.setColumn((short) (k + mb.getFirstColumn()));
br.setRow(mb.getRow());
br.setXFIndex(mb.getXFAt(k));
mulRecs[k] = br;
}
return new Record[] { retval, };
return mulRecs;
}

/**
@@ -325,19 +343,26 @@ public final class RecordFactory {
// After EOF, Excel seems to pad block with zeros
continue;
}
Record[] recs = createRecord(recStream); // handle MulRK records
Record record = createSingleRecord(recStream);

if (recs.length > 1) {
for (int k = 0; k < recs.length; k++) {
records.add(recs[ k ]); // these will be number records
}
if (record instanceof DBCellRecord) {
// Not needed by POI. Regenerated from scratch by POI when spreadsheet is written
continue;
}
Record record = recs[ 0 ];

if (record == null) {
if (record instanceof RKRecord) {
records.add(convertToNumberRecord((RKRecord) record));
continue;
}
if (record instanceof MulRKRecord) {
addAll(records, convertRKRecords((MulRKRecord)record));
continue;
}
if (record instanceof MulBlankRecord) {
addAll(records, convertMulBlankRecords((MulBlankRecord)record));
continue;
}

if (record.getSid() == DrawingGroupRecord.sid
&& lastRecord instanceof DrawingGroupRecord) {
DrawingGroupRecord lastDGRecord = (DrawingGroupRecord) lastRecord;
@@ -354,8 +379,6 @@ public final class RecordFactory {
records.add(record);
} else if (lastRecord instanceof DrawingGroupRecord) {
((DrawingGroupRecord)lastRecord).processContinueRecord(contRec.getData());
} else if (lastRecord instanceof StringRecord) {
((StringRecord)lastRecord).processContinueRecord(contRec.getData());
} else if (lastRecord instanceof UnknownRecord) {
//Gracefully handle records that we don't know about,
//that happen to be continued
@@ -373,4 +396,10 @@ public final class RecordFactory {
}
return records;
}

private static void addAll(List destList, Record[] srcRecs) {
for (int i = 0; i < srcRecs.length; i++) {
destList.add(srcRecs[i]);
}
}
}

+ 0
- 10
src/java/org/apache/poi/hssf/record/RecordInputStream.java Visa fil

@@ -320,16 +320,6 @@ public final class RecordInputStream extends InputStream implements LittleEndian
}
}

/** Returns an excel style unicode string from the bytes remaining in the record.
* <i>Note:</i> Unicode strings differ from <b>normal</b> strings due to the addition of
* formatting information.
*
* @return The unicode string representation of the remaining bytes.
*/
public UnicodeString readUnicodeString() {
return new UnicodeString(this);
}

/** Returns the remaining bytes for the current record.
*
* @return The remaining bytes of the current record.

+ 15
- 74
src/java/org/apache/poi/hssf/record/SSTRecord.java Visa fil

@@ -17,14 +17,16 @@

package org.apache.poi.hssf.record;

import java.util.Iterator;

import org.apache.poi.hssf.record.cont.ContinuableRecord;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.IntMapper;
import org.apache.poi.util.LittleEndianConsts;

import java.util.Iterator;

/**
* Title: Static String Table Record
* <P>
* Title: Static String Table Record (0x00FC)<p/>
*
* Description: This holds all the strings for LabelSSTRecords.
* <P>
* REFERENCE: PG 389 Microsoft Excel 97 Developer's Kit (ISBN:
@@ -37,27 +39,20 @@ import java.util.Iterator;
* @see org.apache.poi.hssf.record.LabelSSTRecord
* @see org.apache.poi.hssf.record.ContinueRecord
*/
public final class SSTRecord extends Record {
public final class SSTRecord extends ContinuableRecord {
public static final short sid = 0x00FC;

private static UnicodeString EMPTY_STRING = new UnicodeString("");

/** how big can an SST record be? As big as any record can be: 8228 bytes */
static final int MAX_RECORD_SIZE = 8228;
private static final UnicodeString EMPTY_STRING = new UnicodeString("");

// TODO - move these constants to test class (the only consumer)
/** standard record overhead: two shorts (record id plus data space size)*/
static final int STD_RECORD_OVERHEAD =
2 * LittleEndianConsts.SHORT_SIZE;
static final int STD_RECORD_OVERHEAD = 2 * LittleEndianConsts.SHORT_SIZE;

/** SST overhead: the standard record overhead, plus the number of strings and the number of unique strings -- two ints */
static final int SST_RECORD_OVERHEAD =
( STD_RECORD_OVERHEAD + ( 2 * LittleEndianConsts.INT_SIZE ) );
static final int SST_RECORD_OVERHEAD = STD_RECORD_OVERHEAD + 2 * LittleEndianConsts.INT_SIZE;

/** how much data can we stuff into an SST record? That would be _max minus the standard SST record overhead */
static final int MAX_DATA_SPACE = MAX_RECORD_SIZE - SST_RECORD_OVERHEAD;

/** overhead for each string includes the string's character count (a short) and the flag describing its characteristics (a byte) */
static final int STRING_MINIMAL_OVERHEAD = LittleEndianConsts.SHORT_SIZE + LittleEndianConsts.BYTE_SIZE;
static final int MAX_DATA_SPACE = RecordInputStream.MAX_RECORD_DATA_SIZE - 8;

/** union of strings in the SST and EXTSST */
private int field_1_num_strings;
@@ -133,37 +128,6 @@ public final class SSTRecord extends Record {
return field_2_num_unique_strings;
}

/**
* USE THIS METHOD AT YOUR OWN PERIL: THE <code>addString</code>
* METHODS MANIPULATE THE NUMBER OF STRINGS AS A SIDE EFFECT; YOUR
* ATTEMPTS AT MANIPULATING THE STRING COUNT IS LIKELY TO BE VERY
* WRONG AND WILL RESULT IN BAD BEHAVIOR WHEN THIS RECORD IS
* WRITTEN OUT AND ANOTHER PROCESS ATTEMPTS TO READ THE RECORD
*
* @param count number of strings
*
*/

public void setNumStrings( final int count )
{
field_1_num_strings = count;
}

/**
* USE THIS METHOD AT YOUR OWN PERIL: THE <code>addString</code>
* METHODS MANIPULATE THE NUMBER OF UNIQUE STRINGS AS A SIDE
* EFFECT; YOUR ATTEMPTS AT MANIPULATING THE UNIQUE STRING COUNT
* IS LIKELY TO BE VERY WRONG AND WILL RESULT IN BAD BEHAVIOR WHEN
* THIS RECORD IS WRITTEN OUT AND ANOTHER PROCESS ATTEMPTS TO READ
* THE RECORD
*
* @param count number of strings
*/

public void setNumUniqueStrings( final int count )
{
field_2_num_unique_strings = count;
}

/**
* Get a particular string by its index
@@ -178,11 +142,6 @@ public final class SSTRecord extends Record {
return (UnicodeString) field_3_strings.get( id );
}

public boolean isString16bit( final int id )
{
UnicodeString unicodeString = ( (UnicodeString) field_3_strings.get( id ) );
return ( ( unicodeString.getOptionFlags() & 0x01 ) == 1 );
}

/**
* Return a debugging string representation
@@ -350,29 +309,11 @@ public final class SSTRecord extends Record {
return field_3_strings.size();
}

/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
* byte array.
*
* @return size
*/

public int serialize( int offset, byte[] data )
{
SSTSerializer serializer = new SSTSerializer(
field_3_strings, getNumStrings(), getNumUniqueStrings() );
int bytes = serializer.serialize( offset, data );
protected void serialize(ContinuableRecordOutput out) {
SSTSerializer serializer = new SSTSerializer(field_3_strings, getNumStrings(), getNumUniqueStrings() );
serializer.serialize(out);
bucketAbsoluteOffsets = serializer.getBucketAbsoluteOffsets();
bucketRelativeOffsets = serializer.getBucketRelativeOffsets();
return bytes;
}


protected int getDataSize() {
SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(field_3_strings);
int recordSize = calculator.getRecordSize();
return recordSize-4;
}

SSTDeserializer getDeserializer()

+ 0
- 76
src/java/org/apache/poi/hssf/record/SSTRecordHeader.java Visa fil

@@ -1,76 +0,0 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hssf.record;

import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianConsts;

/**
* Write out an SST header record.
*
* @author Glen Stampoultzis (glens at apache.org)
*/
class SSTRecordHeader
{
int numStrings;
int numUniqueStrings;

public SSTRecordHeader( int numStrings, int numUniqueStrings )
{
this.numStrings = numStrings;
this.numUniqueStrings = numUniqueStrings;
}

/**
* Writes out the SST record. This consists of the sid, the record size, the number of
* strings and the number of unique strings.
*
* @param data The data buffer to write the header to.
* @param bufferIndex The index into the data buffer where the header should be written.
* @param recSize The number of records written.
*
* @return The buffer of bytes modified.
*/
public int writeSSTHeader( UnicodeString.UnicodeRecordStats stats, byte[] data, int bufferIndex, int recSize )
{
int offset = bufferIndex;

LittleEndian.putShort( data, offset, SSTRecord.sid );
offset += LittleEndianConsts.SHORT_SIZE;
stats.recordSize += LittleEndianConsts.SHORT_SIZE;
stats.remainingSize -= LittleEndianConsts.SHORT_SIZE;
//Delay writing the length
stats.lastLengthPos = offset;
offset += LittleEndianConsts.SHORT_SIZE;
stats.recordSize += LittleEndianConsts.SHORT_SIZE;
stats.remainingSize -= LittleEndianConsts.SHORT_SIZE;
LittleEndian.putInt( data, offset, numStrings );
offset += LittleEndianConsts.INT_SIZE;
stats.recordSize += LittleEndianConsts.INT_SIZE;
stats.remainingSize -= LittleEndianConsts.INT_SIZE;
LittleEndian.putInt( data, offset, numUniqueStrings );
offset += LittleEndianConsts.INT_SIZE;
stats.recordSize += LittleEndianConsts.INT_SIZE;
stats.remainingSize -= LittleEndianConsts.INT_SIZE;

return offset - bufferIndex;
}

}

+ 0
- 51
src/java/org/apache/poi/hssf/record/SSTRecordSizeCalculator.java Visa fil

@@ -1,51 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hssf.record;

import org.apache.poi.util.IntMapper;

/**
* Used to calculate the record sizes for a particular record. This kind of
* sucks because it's similar to the SST serialization code. In general
* the SST serialization code needs to be rewritten.
*
* @author Glen Stampoultzis (glens at apache.org)
* @author Jason Height (jheight at apache.org)
*/
class SSTRecordSizeCalculator
{
private IntMapper strings;

public SSTRecordSizeCalculator(IntMapper strings)
{
this.strings = strings;
}

public int getRecordSize() {
UnicodeString.UnicodeRecordStats rs = new UnicodeString.UnicodeRecordStats();
rs.remainingSize -= SSTRecord.SST_RECORD_OVERHEAD;
rs.recordSize += SSTRecord.SST_RECORD_OVERHEAD;
for (int i=0; i < strings.size(); i++ )
{
UnicodeString unistr = ( (UnicodeString) strings.get(i));
unistr.getRecordSize(rs);
}
return rs.recordSize;
}
}

+ 19
- 42
src/java/org/apache/poi/hssf/record/SSTSerializer.java Visa fil

@@ -1,4 +1,3 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@@ -15,12 +14,11 @@
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hssf.record;

import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.IntMapper;
import org.apache.poi.util.LittleEndian;

/**
* This class handles serialization of SST records. It utilizes the record processor
@@ -28,71 +26,50 @@ import org.apache.poi.util.LittleEndian;
*
* @author Glen Stampoultzis (glens at apache.org)
*/
class SSTSerializer
{
final class SSTSerializer {

// todo: make private again
private IntMapper strings;
private final int _numStrings;
private final int _numUniqueStrings;

private SSTRecordHeader sstRecordHeader;
private final IntMapper strings;

/** Offsets from the beginning of the SST record (even across continuations) */
int[] bucketAbsoluteOffsets;
private final int[] bucketAbsoluteOffsets;
/** Offsets relative the start of the current SST or continue record */
int[] bucketRelativeOffsets;
private final int[] bucketRelativeOffsets;
int startOfSST, startOfRecord;

public SSTSerializer( IntMapper strings, int numStrings, int numUniqueStrings )
{
this.strings = strings;
this.sstRecordHeader = new SSTRecordHeader( numStrings, numUniqueStrings );
_numStrings = numStrings;
_numUniqueStrings = numUniqueStrings;

int infoRecs = ExtSSTRecord.getNumberOfInfoRecsForStrings(strings.size());
this.bucketAbsoluteOffsets = new int[infoRecs];
this.bucketRelativeOffsets = new int[infoRecs];
}

/**
* Create a byte array consisting of an SST record and any
* required Continue records, ready to be written out.
* <p>
* If an SST record and any subsequent Continue records are read
* in to create this instance, this method should produce a byte
* array that is identical to the byte array produced by
* concatenating the input records' data.
*
* @return the byte array
*/
public int serialize(int offset, byte[] data )
{
UnicodeString.UnicodeRecordStats stats = new UnicodeString.UnicodeRecordStats();
sstRecordHeader.writeSSTHeader( stats, data, 0 + offset, 0 );
int pos = offset + SSTRecord.SST_RECORD_OVERHEAD;
public void serialize(ContinuableRecordOutput out) {
out.writeInt(_numStrings);
out.writeInt(_numUniqueStrings);

for ( int k = 0; k < strings.size(); k++ )
{
if (k % ExtSSTRecord.DEFAULT_BUCKET_SIZE == 0)
{
int rOff = out.getTotalSize();
int index = k/ExtSSTRecord.DEFAULT_BUCKET_SIZE;
if (index < ExtSSTRecord.MAX_BUCKETS) {
//Excel only indexes the first 128 buckets.
bucketAbsoluteOffsets[index] = pos-offset;
bucketRelativeOffsets[index] = pos-offset;
}
bucketAbsoluteOffsets[index] = rOff;
bucketRelativeOffsets[index] = rOff;
}
}
UnicodeString s = getUnicodeString(k);
pos += s.serialize(stats, pos, data);
}
//Check to see if there is a hanging continue record length
if (stats.lastLengthPos != -1) {
short lastRecordLength = (short)(pos - stats.lastLengthPos-2);
if (lastRecordLength > 8224)
throw new InternalError();

LittleEndian.putShort(data, stats.lastLengthPos, lastRecordLength);
}
return pos - offset;
}
s.serialize(out);
}
}


private UnicodeString getUnicodeString( int index )

+ 29
- 89
src/java/org/apache/poi/hssf/record/StringRecord.java Visa fil

@@ -17,19 +17,23 @@

package org.apache.poi.hssf.record;

import org.apache.poi.util.LittleEndian;
import org.apache.poi.hssf.record.cont.ContinuableRecord;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.StringUtil;

/**
* Supports the STRING record structure. (0x0207)
* STRING (0x0207)<p/>
*
* Stores the cached result of a text formula
*
* @author Glen Stampoultzis (glens at apache.org)
*/
public class StringRecord extends Record {
public final static short sid = 0x0207;
private int field_1_string_length;
private byte field_2_unicode_flag;
private String field_3_string;
public final class StringRecord extends ContinuableRecord {

public final static short sid = 0x0207;

private boolean _is16bitUnicode;
private String _text;


public StringRecord()
@@ -39,77 +43,24 @@ public class StringRecord extends Record {
/**
* @param in the RecordInputstream to read the record from
*/
public StringRecord( RecordInputStream in)
{
field_1_string_length = in.readShort();
field_2_unicode_flag = in.readByte();
byte[] data = in.readRemainder();
//Why isn't this using the in.readString methods???
if (isUnCompressedUnicode())
{
field_3_string = StringUtil.getFromUnicodeLE(data, 0, field_1_string_length );
}
else
{
field_3_string = StringUtil.getFromCompressedUnicode(data, 0, field_1_string_length);
public StringRecord( RecordInputStream in) {
int field_1_string_length = in.readUShort();
_is16bitUnicode = in.readByte() != 0x00;
if (_is16bitUnicode){
_text = in.readUnicodeLEString(field_1_string_length);
} else {
_text = in.readCompressedUnicode(field_1_string_length);
}
}
public void processContinueRecord(byte[] data) {
if(isUnCompressedUnicode()) {
field_3_string += StringUtil.getFromUnicodeLE(data, 0, field_1_string_length - field_3_string.length());
} else {
field_3_string += StringUtil.getFromCompressedUnicode(data, 0, field_1_string_length - field_3_string.length());
}
}

private int getStringByteLength()
{
return isUnCompressedUnicode() ? field_1_string_length * 2 : field_1_string_length;
}

protected int getDataSize() {
return 2 + 1 + getStringByteLength();
}

/**
* is this uncompressed unicode (16bit)? Or just 8-bit compressed?
* @return isUnicode - True for 16bit- false for 8bit
*/
public boolean isUnCompressedUnicode()
{
return (field_2_unicode_flag == 1);
protected void serialize(ContinuableRecordOutput out) {
out.writeShort(_text.length());
out.writeStringData(_text);
}

/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
* byte array.
*
* @param offset to begin writing at
* @param data byte array containing instance data
* @return number of bytes written
*/
public int serialize( int offset, byte[] data )
{
LittleEndian.putUShort(data, 0 + offset, sid);
LittleEndian.putUShort(data, 2 + offset, 3 + getStringByteLength());
LittleEndian.putUShort(data, 4 + offset, field_1_string_length);
data[6 + offset] = field_2_unicode_flag;
if (isUnCompressedUnicode())
{
StringUtil.putUnicodeLE(field_3_string, data, 7 + offset);
}
else
{
StringUtil.putCompressedUnicode(field_3_string, data, 7 + offset);
}
return getRecordSize();
}

/**
* return the non static version of the id for this record.
*/
public short getSid()
{
return sid;
@@ -120,26 +71,16 @@ public class StringRecord extends Record {
*/
public String getString()
{
return field_3_string;
return _text;
}

/**
* Sets whether the string is compressed or not
* @param unicode_flag 1 = uncompressed, 0 = compressed
*/
public void setCompressedFlag( byte unicode_flag )
{
this.field_2_unicode_flag = unicode_flag;
}

/**
* Sets the string represented by this record.
*/
public void setString( String string )
{
this.field_1_string_length = string.length();
this.field_3_string = string;
setCompressedFlag(StringUtil.hasMultibyte(string) ? (byte)1 : (byte)0);
public void setString(String string) {
_text = string;
_is16bitUnicode = StringUtil.hasMultibyte(string);
}

public String toString()
@@ -148,16 +89,15 @@ public class StringRecord extends Record {

buffer.append("[STRING]\n");
buffer.append(" .string = ")
.append(field_3_string).append("\n");
.append(_text).append("\n");
buffer.append("[/STRING]\n");
return buffer.toString();
}
public Object clone() {
StringRecord rec = new StringRecord();
rec.field_1_string_length = this.field_1_string_length;
rec.field_2_unicode_flag= this.field_2_unicode_flag;
rec.field_3_string = this.field_3_string;
rec._is16bitUnicode= _is16bitUnicode;
rec._text = _text;
return rec;
}
}

+ 47
- 53
src/java/org/apache/poi/hssf/record/SupBookRecord.java Visa fil

@@ -17,11 +17,12 @@

package org.apache.poi.hssf.record;

import org.apache.poi.hssf.record.UnicodeString.UnicodeRecordStats;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;

/**
* Title: Sup Book (EXTERNALBOOK) <P>
* Title: Sup Book - EXTERNALBOOK (0x01AE) <p/>
* Description: A External Workbook Description (Supplemental Book)
* Its only a dummy record for making new ExternSheet Record <P>
* REFERENCE: 5.38<P>
@@ -31,25 +32,25 @@ import org.apache.poi.util.LittleEndian;
*/
public final class SupBookRecord extends Record {

public final static short sid = 0x1AE;
public final static short sid = 0x01AE;

private static final short SMALL_RECORD_SIZE = 4;
private static final short TAG_INTERNAL_REFERENCES = 0x0401;
private static final short TAG_ADD_IN_FUNCTIONS = 0x3A01;

private short field_1_number_of_sheets;
private UnicodeString field_2_encoded_url;
private UnicodeString[] field_3_sheet_names;
private boolean _isAddInFunctions;
private short field_1_number_of_sheets;
private String field_2_encoded_url;
private String[] field_3_sheet_names;
private boolean _isAddInFunctions;


public static SupBookRecord createInternalReferences(short numberOfSheets) {
return new SupBookRecord(false, numberOfSheets);
}
public static SupBookRecord createAddInFunctions() {
return new SupBookRecord(true, (short)0);
}
public static SupBookRecord createExternalReferences(UnicodeString url, UnicodeString[] sheetNames) {
public static SupBookRecord createExternalReferences(String url, String[] sheetNames) {
return new SupBookRecord(url, sheetNames);
}
private SupBookRecord(boolean isAddInFuncs, short numberOfSheets) {
@@ -59,7 +60,7 @@ public final class SupBookRecord extends Record {
field_3_sheet_names = null;
_isAddInFunctions = isAddInFuncs;
}
public SupBookRecord(UnicodeString url, UnicodeString[] sheetNames) {
public SupBookRecord(String url, String[] sheetNames) {
field_1_number_of_sheets = (short) sheetNames.length;
field_2_encoded_url = url;
field_3_sheet_names = sheetNames;
@@ -84,18 +85,18 @@ public final class SupBookRecord extends Record {
* @param offset of the record's data (provided a big array of the file)
*/
public SupBookRecord(RecordInputStream in) {
int recLen = in.remaining();
int recLen = in.remaining();
field_1_number_of_sheets = in.readShort();
if(recLen > SMALL_RECORD_SIZE) {
// 5.38.1 External References
_isAddInFunctions = false;

field_2_encoded_url = in.readUnicodeString();
UnicodeString[] sheetNames = new UnicodeString[field_1_number_of_sheets];
field_2_encoded_url = in.readString();
String[] sheetNames = new String[field_1_number_of_sheets];
for (int i = 0; i < sheetNames.length; i++) {
sheetNames[i] = in.readUnicodeString();
sheetNames[i] = in.readString();
}
field_3_sheet_names = sheetNames;
return;
@@ -103,7 +104,7 @@ public final class SupBookRecord extends Record {
// else not 'External References'
field_2_encoded_url = null;
field_3_sheet_names = null;
short nextShort = in.readShort();
if(nextShort == TAG_INTERNAL_REFERENCES) {
// 5.38.2 'Internal References'
@@ -116,7 +117,7 @@ public final class SupBookRecord extends Record {
+ field_1_number_of_sheets + ")");
}
} else {
throw new RuntimeException("invalid EXTERNALBOOK code ("
throw new RuntimeException("invalid EXTERNALBOOK code ("
+ Integer.toHexString(nextShort) + ")");
}
}
@@ -124,7 +125,7 @@ public final class SupBookRecord extends Record {
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append(getClass().getName()).append(" [SUPBOOK ");
if(isExternalReferences()) {
sb.append("External References");
sb.append(" nSheets=").append(field_1_number_of_sheets);
@@ -143,18 +144,14 @@ public final class SupBookRecord extends Record {
return SMALL_RECORD_SIZE;
}
int sum = 2; // u16 number of sheets
UnicodeRecordStats urs = new UnicodeRecordStats();
field_2_encoded_url.getRecordSize(urs);
sum += urs.recordSize;

sum += StringUtil.getEncodedSize(field_2_encoded_url);

for(int i=0; i<field_3_sheet_names.length; i++) {
urs = new UnicodeRecordStats();
field_3_sheet_names[i].getRecordSize(urs);
sum += urs.recordSize;
sum += StringUtil.getEncodedSize(field_3_sheet_names[i]);
}
return sum;
}

/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
@@ -165,29 +162,26 @@ public final class SupBookRecord extends Record {
* @return number of bytes written
*/
public int serialize(int offset, byte [] data) {
LittleEndian.putShort(data, 0 + offset, sid);
int dataSize = getDataSize();
LittleEndian.putShort(data, 2 + offset, (short) dataSize);
LittleEndian.putShort(data, 4 + offset, field_1_number_of_sheets);
int recordSize = 4 + dataSize;
LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recordSize);

out.writeShort(sid);
out.writeShort(dataSize);
out.writeShort(field_1_number_of_sheets);

if(isExternalReferences()) {
int currentOffset = 6 + offset;
UnicodeRecordStats urs = new UnicodeRecordStats();
field_2_encoded_url.serialize(urs, currentOffset, data);
currentOffset += urs.recordSize;
StringUtil.writeUnicodeString(out, field_2_encoded_url);

for(int i=0; i<field_3_sheet_names.length; i++) {
urs = new UnicodeRecordStats();
field_3_sheet_names[i].serialize(urs, currentOffset, data);
currentOffset += urs.recordSize;
StringUtil.writeUnicodeString(out, field_3_sheet_names[i]);
}
} else {
short field2val = _isAddInFunctions ? TAG_ADD_IN_FUNCTIONS : TAG_INTERNAL_REFERENCES;
LittleEndian.putShort(data, 6 + offset, field2val);
int field2val = _isAddInFunctions ? TAG_ADD_IN_FUNCTIONS : TAG_INTERNAL_REFERENCES;
out.writeShort(field2val);
}
return dataSize + 4;
return recordSize;
}

public void setNumberOfSheets(short number){
@@ -203,7 +197,7 @@ public final class SupBookRecord extends Record {
return sid;
}
public String getURL() {
String encodedUrl = field_2_encoded_url.getString();
String encodedUrl = field_2_encoded_url;
switch(encodedUrl.charAt(0)) {
case 0: // Reference to an empty workbook name
return encodedUrl.substring(1); // will this just be empty string?
@@ -211,7 +205,7 @@ public final class SupBookRecord extends Record {
return decodeFileName(encodedUrl);
case 2: // Self-referential external reference
return encodedUrl.substring(1);
}
return encodedUrl;
}
@@ -219,18 +213,18 @@ public final class SupBookRecord extends Record {
return encodedUrl.substring(1);
// TODO the following special characters may appear in the rest of the string, and need to get interpreted
/* see "MICROSOFT OFFICE EXCEL 97-2007 BINARY FILE FORMAT SPECIFICATION"
chVolume 1
chSameVolume 2
chVolume 1
chSameVolume 2
chDownDir 3
chUpDir 4
chUpDir 4
chLongVolume 5
chStartupDir 6
chAltStartupDir 7
chLibDir 8
*/
}
public UnicodeString[] getSheetNames() {
return (UnicodeString[]) field_3_sheet_names.clone();
public String[] getSheetNames() {
return (String[]) field_3_sheet_names.clone();
}
}

+ 19
- 109
src/java/org/apache/poi/hssf/record/TextObjectRecord.java Visa fil

@@ -17,16 +17,13 @@

package org.apache.poi.hssf.record;

import java.io.UnsupportedEncodingException;
import org.apache.poi.hssf.record.cont.ContinuableRecord;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.hssf.record.formula.Ptg;
import org.apache.poi.hssf.usermodel.HSSFRichTextString;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutput;

/**
* The TXO record (0x01B6) is used to define the properties of a text box. It is
@@ -36,7 +33,7 @@ import org.apache.poi.util.LittleEndianOutput;
*
* @author Glen Stampoultzis (glens at apache.org)
*/
public final class TextObjectRecord extends Record {
public final class TextObjectRecord extends ContinuableRecord {
public final static short sid = 0x01B6;

private static final int FORMAT_RUN_ENCODED_SIZE = 8; // 2 shorts and 4 bytes reserved
@@ -163,30 +160,7 @@ public final class TextObjectRecord extends Record {
return sid;
}

/**
* Only for the current record. does not include any subsequent Continue
* records
*/
private int getCurrentRecordDataSize() {
int result = 2 + 2 + 2 + 2 + 2 + 2 + 2 + 4;
if (_linkRefPtg != null) {
result += 2 // formula size
+ 4 // unknownInt
+_linkRefPtg.getSize();
if (_unknownPostFormulaByte != null) {
result += 1;
}
}
return result;
}

private int serializeTXORecord(int offset, byte[] data) {
int dataSize = getCurrentRecordDataSize();
int recSize = dataSize+4;
LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recSize);
out.writeShort(TextObjectRecord.sid);
out.writeShort(dataSize);
private void serializeTXORecord(ContinuableRecordOutput out) {
out.writeShort(field_1_options);
out.writeShort(field_2_textOrientation);
@@ -206,79 +180,23 @@ public final class TextObjectRecord extends Record {
out.writeByte(_unknownPostFormulaByte.byteValue());
}
}
return recSize;
}

private int serializeTrailingRecords(int offset, byte[] data) {
byte[] textBytes;
try {
textBytes = _text.getString().getBytes("UTF-16LE");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e.getMessage(), e);
}
int remainingLength = textBytes.length;

int countTextBytesWritten = 0;
int pos = offset;
// (regardless what was read, we always serialize double-byte
// unicode characters (UTF-16LE).
Byte unicodeFlag = new Byte((byte)1);
while (remainingLength > 0) {
int chunkSize = Math.min(RecordInputStream.MAX_RECORD_DATA_SIZE - 2, remainingLength);
remainingLength -= chunkSize;
pos += ContinueRecord.write(data, pos, unicodeFlag, textBytes, countTextBytesWritten, chunkSize);
countTextBytesWritten += chunkSize;
}

byte[] formatData = createFormatData(_text);
pos += ContinueRecord.write(data, pos, null, formatData);
return pos - offset;
private void serializeTrailingRecords(ContinuableRecordOutput out) {
out.writeContinue();
out.writeStringData(_text.getString());
out.writeContinue();
writeFormatData(out, _text);
}

private int getTrailingRecordsSize() {
if (_text.length() < 1) {
return 0;
}
int encodedTextSize = 0;
int textBytesLength = _text.length() * LittleEndian.SHORT_SIZE;
while (textBytesLength > 0) {
int chunkSize = Math.min(RecordInputStream.MAX_RECORD_DATA_SIZE - 2, textBytesLength);
textBytesLength -= chunkSize;

encodedTextSize += 4; // +4 for ContinueRecord sid+size
encodedTextSize += 1+chunkSize; // +1 for compressed unicode flag,
}

int encodedFormatSize = (_text.numFormattingRuns() + 1) * FORMAT_RUN_ENCODED_SIZE
+ 4; // +4 for ContinueRecord sid+size
return encodedTextSize + encodedFormatSize;
}
protected void serialize(ContinuableRecordOutput out) {


public int serialize(int offset, byte[] data) {

int expectedTotalSize = getRecordSize();
int totalSize = serializeTXORecord(offset, data);
serializeTXORecord(out);
if (_text.getString().length() > 0) {
totalSize += serializeTrailingRecords(offset+totalSize, data);
serializeTrailingRecords(out);
}
if (totalSize != expectedTotalSize)
throw new RecordFormatException(totalSize
+ " bytes written but getRecordSize() reports " + expectedTotalSize);
return totalSize;
}

/**
* Note - this total size includes all potential {@link ContinueRecord}s written
* but it is not the "ushort size" value to be written at the start of the first BIFF record
*/
protected int getDataSize() {
return getCurrentRecordDataSize() + getTrailingRecordsSize();
}

private int getFormattingDataLength() {
if (_text.length() < 1) {
// important - no formatting data if text is empty
@@ -287,25 +205,17 @@ public final class TextObjectRecord extends Record {
return (_text.numFormattingRuns() + 1) * FORMAT_RUN_ENCODED_SIZE;
}

private static byte[] createFormatData(HSSFRichTextString str) {
private static void writeFormatData(ContinuableRecordOutput out , HSSFRichTextString str) {
int nRuns = str.numFormattingRuns();
byte[] result = new byte[(nRuns + 1) * FORMAT_RUN_ENCODED_SIZE];
int pos = 0;
for (int i = 0; i < nRuns; i++) {
LittleEndian.putUShort(result, pos, str.getIndexOfFormattingRun(i));
pos += 2;
out.writeShort(str.getIndexOfFormattingRun(i));
int fontIndex = str.getFontOfFormattingRun(i);
LittleEndian.putUShort(result, pos, fontIndex == str.NO_FONT ? 0 : fontIndex);
pos += 2;
pos += 4; // skip reserved
out.writeShort(fontIndex == str.NO_FONT ? 0 : fontIndex);
out.writeInt(0); // skip reserved
}
LittleEndian.putUShort(result, pos, str.length());
pos += 2;
LittleEndian.putUShort(result, pos, 0);
pos += 2;
pos += 4; // skip reserved

return result;
out.writeShort(str.length());
out.writeShort(0);
out.writeInt(0); // skip reserved
}

/**

+ 107
- 414
src/java/org/apache/poi/hssf/record/UnicodeString.java Visa fil

@@ -17,75 +17,84 @@

package org.apache.poi.hssf.record;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.HexDump;

import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;

/**
* Title: Unicode String<P>
* Description: Unicode String record. We implement these as a record, although
* they are really just standard fields that are in several records.
* It is considered more desirable then repeating it in all of them.<P>
* REFERENCE: PG 264 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)<P>
* Title: Unicode String<p/>
* Description: Unicode String - just standard fields that are in several records.
* It is considered more desirable then repeating it in all of them.<p/>
* REFERENCE: PG 264 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)<p/>
* @author Andrew C. Oliver
* @author Marc Johnson (mjohnson at apache dot org)
* @author Glen Stampoultzis (glens at apache.org)
*/
public final class UnicodeString implements Comparable {
private short field_1_charCount; // = 0;
private byte field_2_optionflags; // = 0;
private String field_3_string; // = null;
private short field_1_charCount;
private byte field_2_optionflags;
private String field_3_string;
private List field_4_format_runs;
private byte[] field_5_ext_rst;
private static final BitField highByte = BitFieldFactory.getInstance(0x1);
private static final BitField extBit = BitFieldFactory.getInstance(0x4);
private static final BitField richText = BitFieldFactory.getInstance(0x8);
private static final BitField highByte = BitFieldFactory.getInstance(0x1);
private static final BitField extBit = BitFieldFactory.getInstance(0x4);
private static final BitField richText = BitFieldFactory.getInstance(0x8);

public static class FormatRun implements Comparable {
short character;
short fontIndex;
short character;
short fontIndex;

public FormatRun(short character, short fontIndex) {
this.character = character;
this.fontIndex = fontIndex;
}
public FormatRun(short character, short fontIndex) {
this.character = character;
this.fontIndex = fontIndex;
}

public short getCharacterPos() {
return character;
}
public FormatRun(LittleEndianInput in) {
this(in.readShort(), in.readShort());
}

public short getFontIndex() {
return fontIndex;
}
public short getCharacterPos() {
return character;
}

public boolean equals(Object o) {
if ((o == null) || (o.getClass() != this.getClass()))
{
return false;
public short getFontIndex() {
return fontIndex;
}
FormatRun other = ( FormatRun ) o;

return ((character == other.character) && (fontIndex == other.fontIndex));
}
public boolean equals(Object o) {
if (!(o instanceof FormatRun)) {
return false;
}
FormatRun other = ( FormatRun ) o;

public int compareTo(Object obj) {
FormatRun r = (FormatRun)obj;
if ((character == r.character) && (fontIndex == r.fontIndex))
return 0;
if (character == r.character)
return fontIndex - r.fontIndex;
else return character - r.character;
}
return character == other.character && fontIndex == other.fontIndex;
}

public String toString() {
return "character="+character+",fontIndex="+fontIndex;
}
public int compareTo(Object obj) {
FormatRun r = (FormatRun)obj;
if ((character == r.character) && (fontIndex == r.fontIndex))
return 0;
if (character == r.character)
return fontIndex - r.fontIndex;
else return character - r.character;
}

public String toString() {
return "character="+character+",fontIndex="+fontIndex;
}

public void serialize(LittleEndianOutput out) {
out.writeShort(character);
out.writeShort(fontIndex);
}
}

private UnicodeString() {
@@ -116,13 +125,12 @@ public final class UnicodeString implements Comparable {
*/
public boolean equals(Object o)
{
if ((o == null) || (o.getClass() != this.getClass()))
{
if (!(o instanceof UnicodeString)) {
return false;
}
UnicodeString other = ( UnicodeString ) o;
UnicodeString other = (UnicodeString) o;

//Ok lets do this in stages to return a quickly, first check the actual string
//OK lets do this in stages to return a quickly, first check the actual string
boolean eq = ((field_1_charCount == other.field_1_charCount)
&& (field_2_optionflags == other.field_2_optionflags)
&& field_3_string.equals(other.field_3_string));
@@ -148,7 +156,7 @@ public final class UnicodeString implements Comparable {

if (!run1.equals(run2))
return false;
}
}

//Well the format runs are equal as well!, better check the ExtRst data
//Which by the way we dont know how to decode!
@@ -194,19 +202,17 @@ public final class UnicodeString implements Comparable {

boolean isCompressed = ((field_2_optionflags & 1) == 0);
if (isCompressed) {
field_3_string = in.readCompressedUnicode(field_1_charCount);
field_3_string = in.readCompressedUnicode(field_1_charCount);
} else {
field_3_string = in.readUnicodeLEString(field_1_charCount);
field_3_string = in.readUnicodeLEString(field_1_charCount);
}

if (isRichText() && (runCount > 0)) {
field_4_format_runs = new ArrayList(runCount);
for (int i=0;i<runCount;i++) {
field_4_format_runs.add(new FormatRun(in.readShort(), in.readShort()));
//read reserved
//in.readInt();
}
field_4_format_runs.add(new FormatRun(in));
}
}

if (isExtendedText() && (extensionLength > 0)) {
@@ -372,11 +378,8 @@ public final class UnicodeString implements Comparable {
field_2_optionflags = richText.clearByte(field_2_optionflags);
}

public byte[] getExtendedRst() {
return this.field_5_ext_rst;
}

public void setExtendedRst(byte[] ext_rst) {
void setExtendedRst(byte[] ext_rst) {
if (ext_rst != null)
field_2_optionflags = extBit.setByte(field_2_optionflags);
else field_2_optionflags = extBit.clearByte(field_2_optionflags);
@@ -391,13 +394,13 @@ public final class UnicodeString implements Comparable {
* removed / re-ordered
*/
public void swapFontUse(short oldFontIndex, short newFontIndex) {
Iterator i = field_4_format_runs.iterator();
while(i.hasNext()) {
FormatRun run = (FormatRun)i.next();
if(run.fontIndex == oldFontIndex) {
run.fontIndex = newFontIndex;
}
}
Iterator i = field_4_format_runs.iterator();
while(i.hasNext()) {
FormatRun run = (FormatRun)i.next();
if(run.fontIndex == oldFontIndex) {
run.fontIndex = newFontIndex;
}
}
}
/**
@@ -442,353 +445,45 @@ public final class UnicodeString implements Comparable {
return buffer.toString();
}

private int writeContinueIfRequired(UnicodeRecordStats stats, final int requiredSize, int offset, byte[] data) {
//Basic string overhead
if (stats.remainingSize < requiredSize) {
//Check if be are already in a continue record, if so make sure that
//we go back and write out our length
if (stats.lastLengthPos != -1) {
short lastRecordLength = (short)(offset - stats.lastLengthPos - 2);
if (lastRecordLength > 8224)
throw new InternalError();
LittleEndian.putShort(data, stats.lastLengthPos, lastRecordLength);
}

LittleEndian.putShort(data, offset, ContinueRecord.sid);
offset+=2;
//Record the location of the last continue length position, but don't write
//anything there yet (since we don't know what it will be!)
stats.lastLengthPos = offset;
offset += 2;

stats.recordSize += 4;
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
}
return offset;
}

public int serialize(UnicodeRecordStats stats, final int offset, byte [] data)
{
int pos = offset;

//Basic string overhead
pos = writeContinueIfRequired(stats, 3, pos, data);
LittleEndian.putShort(data, pos, getCharCount());
pos += 2;
data[ pos ] = getOptionFlags();
pos += 1;
stats.recordSize += 3;
stats.remainingSize-= 3;

if (isRichText()) {
if (field_4_format_runs != null) {
pos = writeContinueIfRequired(stats, 2, pos, data);

LittleEndian.putShort(data, pos, (short) field_4_format_runs.size());
pos += 2;
stats.recordSize += 2;
stats.remainingSize -= 2;
}
}
if ( isExtendedText() )
{
if (this.field_5_ext_rst != null) {
pos = writeContinueIfRequired(stats, 4, pos, data);

LittleEndian.putInt(data, pos, field_5_ext_rst.length);
pos += 4;
stats.recordSize += 4;
stats.remainingSize -= 4;
}
}

int charsize = isUncompressedUnicode() ? 2 : 1;
int strSize = (getString().length() * charsize);

byte[] strBytes = null;
try {
String unicodeString = getString();
if (!isUncompressedUnicode())
{
strBytes = unicodeString.getBytes("ISO-8859-1");
}
else
{
strBytes = unicodeString.getBytes("UTF-16LE");
}
}
catch (Exception e) {
throw new InternalError();
}
if (strSize != strBytes.length)
throw new InternalError("That shouldnt have happened!");

//Check to see if the offset occurs mid string, if so then we need to add
//the byte to start with that represents the first byte of the continue record.
if (strSize > stats.remainingSize) {
//OK the offset occurs half way through the string, that means that
//we need an extra byte after the continue record ie we didnt finish
//writing out the string the 1st time through

//But hang on, how many continue records did we span? What if this is
//a REALLY long string. We need to work this all out.
int amountThatCantFit = strSize;
int strPos = 0;
while (amountThatCantFit > 0) {
int amountWritten = Math.min(stats.remainingSize, amountThatCantFit);
//Make sure that the amount that can't fit takes into account
//whether we are writing double byte unicode
if (isUncompressedUnicode()) {
//We have the '-1' here because whether this is the first record or
//subsequent continue records, there is always the case that the
//number of bytes in a string on double byte boundaries is actually odd.
if ( ( (amountWritten ) % 2) == 1)
amountWritten--;
}
System.arraycopy(strBytes, strPos, data, pos, amountWritten);
pos += amountWritten;
strPos += amountWritten;
stats.recordSize += amountWritten;
stats.remainingSize -= amountWritten;

//Ok lets subtract what we can write
amountThatCantFit -= amountWritten;

//Each iteration of this while loop is another continue record, unless
//everything now fits.
if (amountThatCantFit > 0) {
//We know that a continue WILL be requied, but use this common method
pos = writeContinueIfRequired(stats, amountThatCantFit, pos, data);

//The first byte after a continue mid string is the extra byte to
//indicate if this run is compressed or not.
data[pos] = (byte) (isUncompressedUnicode() ? 0x1 : 0x0);
pos++;
stats.recordSize++;
stats.remainingSize --;
}
}
} else {
if (strSize > (data.length-pos))
System.out.println("Hmm shouldnt happen");
//Ok the string fits nicely in the remaining size
System.arraycopy(strBytes, 0, data, pos, strSize);
pos += strSize;
stats.recordSize += strSize;
stats.remainingSize -= strSize;
}


if (isRichText() && (field_4_format_runs != null)) {
int count = field_4_format_runs.size();

//This will ensure that a run does not split a continue
for (int i=0;i<count;i++) {
pos = writeContinueIfRequired(stats, 4, pos, data);
FormatRun r = (FormatRun)field_4_format_runs.get(i);
LittleEndian.putShort(data, pos, r.character);
pos += 2;
LittleEndian.putShort(data, pos, r.fontIndex);
pos += 2;

//Each run count is four bytes
stats.recordSize += 4;
stats.remainingSize -=4;
public void serialize(ContinuableRecordOutput out) {
int numberOfRichTextRuns = 0;
int extendedDataSize = 0;
if (isRichText() && field_4_format_runs != null) {
numberOfRichTextRuns = field_4_format_runs.size();
}
}

if (isExtendedText() && (field_5_ext_rst != null)) {
//Ok ExtRst is actually not documented, so i am going to hope
//that we can actually continue on byte boundaries
int ammountThatCantFit = field_5_ext_rst.length - stats.remainingSize;
int extPos = 0;
if (ammountThatCantFit > 0) {
while (ammountThatCantFit > 0) {
//So for this record we have already written
int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
System.arraycopy(field_5_ext_rst, extPos, data, pos, ammountWritten);
pos += ammountWritten;
extPos += ammountWritten;
stats.recordSize += ammountWritten;
stats.remainingSize -= ammountWritten;

//Ok lets subtract what we can write
ammountThatCantFit -= ammountWritten;
if (ammountThatCantFit > 0) {
pos = writeContinueIfRequired(stats, 1, pos, data);
}
}
} else {
//We can fit wholey in what remains.
System.arraycopy(field_5_ext_rst, 0, data, pos, field_5_ext_rst.length);
pos += field_5_ext_rst.length;
stats.remainingSize -= field_5_ext_rst.length;
stats.recordSize += field_5_ext_rst.length;
if (isExtendedText() && field_5_ext_rst != null) {
extendedDataSize = field_5_ext_rst.length;
}
}

return pos - offset;
}


public void setCompressedUnicode() {
field_2_optionflags = highByte.setByte(field_2_optionflags);
}

public void setUncompressedUnicode() {
field_2_optionflags = highByte.clearByte(field_2_optionflags);
}

private boolean isUncompressedUnicode()
{
return highByte.isSet(getOptionFlags());
}

/** Returns the size of this record, given the amount of record space
* remaining, it will also include the size of writing a continue record.
*/

public static class UnicodeRecordStats {
public int recordSize;
public int remainingSize = SSTRecord.MAX_RECORD_SIZE;
public int lastLengthPos = -1;
}
public void getRecordSize(UnicodeRecordStats stats) {
//Basic string overhead
if (stats.remainingSize < 3) {
//Needs a continue
stats.recordSize += 4;
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
}
stats.recordSize += 3;
stats.remainingSize-= 3;

//Read the number of rich runs if rich text.
if ( isRichText() )
{
//Run count
if (stats.remainingSize < 2) {
//Needs a continue
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize+=4;
}

stats.recordSize += 2;
stats.remainingSize -=2;
}
//Read the size of extended data if present.
if ( isExtendedText() )
{
//Needs a continue
//extension length
if (stats.remainingSize < 4) {
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize+=4;
}

stats.recordSize += 4;
stats.remainingSize -=4;
}

int charsize = isUncompressedUnicode() ? 2 : 1;
int strSize = (getString().length() * charsize);
//Check to see if the offset occurs mid string, if so then we need to add
//the byte to start with that represents the first byte of the continue record.
if (strSize > stats.remainingSize) {
//Ok the offset occurs half way through the string, that means that
//we need an extra byte after the continue record ie we didnt finish
//writing out the string the 1st time through

//But hang on, how many continue records did we span? What if this is
//a REALLY long string. We need to work this all out.
int ammountThatCantFit = strSize;
while (ammountThatCantFit > 0) {
int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
//Make sure that the ammount that cant fit takes into account
//whether we are writing double byte unicode
if (isUncompressedUnicode()) {
//We have the '-1' here because whether this is the first record or
//subsequent continue records, there is always the case that the
//number of bytes in a string on doube byte boundaries is actually odd.
if ( ( (ammountWritten) % 2) == 1)
ammountWritten--;
}
stats.recordSize += ammountWritten;
stats.remainingSize -= ammountWritten;

//Ok lets subtract what we can write
ammountThatCantFit -= ammountWritten;

//Each iteration of this while loop is another continue record, unless
//everything now fits.
if (ammountThatCantFit > 0) {
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize+=4;

//The first byte after a continue mid string is the extra byte to
//indicate if this run is compressed or not.
stats.recordSize++;
stats.remainingSize --;
}
}
} else {
//Ok the string fits nicely in the remaining size
stats.recordSize += strSize;
stats.remainingSize -= strSize;
}
out.writeString(field_3_string, numberOfRichTextRuns, extendedDataSize);

if (isRichText() && (field_4_format_runs != null)) {
int count = field_4_format_runs.size();
if (numberOfRichTextRuns > 0) {

//This will ensure that a run does not split a continue
for (int i=0;i<count;i++) {
if (stats.remainingSize < 4) {
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize+=4;
//This will ensure that a run does not split a continue
for (int i=0;i<numberOfRichTextRuns;i++) {
if (out.getAvailableSpace() < 4) {
out.writeContinue();
}
FormatRun r = (FormatRun)field_4_format_runs.get(i);
r.serialize(out);
}

//Each run count is four bytes
stats.recordSize += 4;
stats.remainingSize -=4;
}
}

if (isExtendedText() && (field_5_ext_rst != null)) {
//Ok ExtRst is actually not documented, so i am going to hope
//that we can actually continue on byte boundaries
int ammountThatCantFit = field_5_ext_rst.length - stats.remainingSize;
if (ammountThatCantFit > 0) {
while (ammountThatCantFit > 0) {
//So for this record we have already written
int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
stats.recordSize += ammountWritten;
stats.remainingSize -= ammountWritten;

//Ok lets subtract what we can write
ammountThatCantFit -= ammountWritten;
if (ammountThatCantFit > 0) {
//Each iteration of this while loop is another continue record.

//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize += 4;
if (extendedDataSize > 0) {
// OK ExtRst is actually not documented, so i am going to hope
// that we can actually continue on byte boundaries

int extPos = 0;
while (true) {
int nBytesToWrite = Math.min(extendedDataSize - extPos, out.getAvailableSpace());
out.write(field_5_ext_rst, extPos, nBytesToWrite);
extPos += nBytesToWrite;
if (extPos >= extendedDataSize) {
break;
}
out.writeContinue();
}
}
} else {
//We can fit wholey in what remains.
stats.remainingSize -= field_5_ext_rst.length;
stats.recordSize += field_5_ext_rst.length;
}
}
}

public int compareTo(Object obj)
@@ -801,9 +496,9 @@ public final class UnicodeString implements Comparable {
if (result != 0)
return result;

//Ok string appears to be equal but now lets compare formatting runs
//OK string appears to be equal but now lets compare formatting runs
if ((field_4_format_runs == null) && (str.field_4_format_runs == null))
//Strings are equal, and there are no formtting runs.
//Strings are equal, and there are no formatting runs.
return 0;

if ((field_4_format_runs == null) && (str.field_4_format_runs != null))
@@ -850,12 +545,12 @@ public final class UnicodeString implements Comparable {
return 0;
}

public boolean isRichText()
private boolean isRichText()
{
return richText.isSet(getOptionFlags());
}

public boolean isExtendedText()
private boolean isExtendedText()
{
return extBit.isSet(getOptionFlags());
}
@@ -877,10 +572,8 @@ public final class UnicodeString implements Comparable {
str.field_5_ext_rst = new byte[field_5_ext_rst.length];
System.arraycopy(field_5_ext_rst, 0, str.field_5_ext_rst, 0,
field_5_ext_rst.length);
}
}

return str;
}


}

+ 6
- 10
src/java/org/apache/poi/hssf/record/constant/ConstantValueParser.java Visa fil

@@ -17,8 +17,6 @@

package org.apache.poi.hssf.record.constant;

import org.apache.poi.hssf.record.UnicodeString;
import org.apache.poi.hssf.record.UnicodeString.UnicodeRecordStats;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
@@ -65,7 +63,7 @@ public final class ConstantValueParser {
case TYPE_NUMBER:
return new Double(in.readDouble());
case TYPE_STRING:
return new UnicodeString(StringUtil.readUnicodeString(in));
return StringUtil.readUnicodeString(in);
case TYPE_BOOLEAN:
return readBoolean(in);
case TYPE_ERROR_CODE:
@@ -111,10 +109,8 @@ public final class ConstantValueParser {
if(cls == Boolean.class || cls == Double.class || cls == ErrorConstant.class) {
return 8;
}
UnicodeString strVal = (UnicodeString)object;
UnicodeRecordStats urs = new UnicodeRecordStats();
strVal.getRecordSize(urs);
return urs.recordSize;
String strVal = (String)object;
return StringUtil.getEncodedSize(strVal);
}

public static void encode(LittleEndianOutput out, Object[] values) {
@@ -142,10 +138,10 @@ public final class ConstantValueParser {
out.writeDouble(dVal.doubleValue());
return;
}
if (value instanceof UnicodeString) {
UnicodeString usVal = (UnicodeString) value;
if (value instanceof String) {
String val = (String) value;
out.writeByte(TYPE_STRING);
StringUtil.writeUnicodeString(out, usVal.getString());
StringUtil.writeUnicodeString(out, val);
return;
}
if (value instanceof ErrorConstant) {

+ 69
- 0
src/java/org/apache/poi/hssf/record/cont/ContinuableRecord.java Visa fil

@@ -0,0 +1,69 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.cont;
import org.apache.poi.hssf.record.ContinueRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutput;
/**
 * Common superclass of all records that can produce {@link ContinueRecord}s while being serialized.
 *
 * @author Josh Micich
 */
public abstract class ContinuableRecord extends Record {
protected ContinuableRecord() {
// no fields to initialise
}
/**
 * Serializes this record's content to the supplied data output.<br/>
 * The standard BIFF header (ushort sid, ushort size) has been handled by the superclass, so
 * only BIFF data should be written by this method. Simple data types can be written with the
 * standard {@link LittleEndianOutput} methods. Methods from {@link ContinuableRecordOutput}
 * can be used to serialize strings (with {@link ContinueRecord}s being written as required).
 * If necessary, implementors can explicitly start {@link ContinueRecord}s (regardless of the
 * amount of remaining space).
 *
 * @param out a data output stream
 */
protected abstract void serialize(ContinuableRecordOutput out);
/**
 * @return four less than the total length of the encoded record(s)
 * (in the case when no {@link ContinueRecord} is needed, this is the
 * same ushort value that gets encoded after the record sid)
 */
protected final int getDataSize() {
// perform a full 'dry run' serialization against a no-op output to count bytes
ContinuableRecordOutput out = ContinuableRecordOutput.createForCountingOnly();
serialize(out);
out.terminate();
// subtract the 4-byte BIFF header (sid + size) of the first record
return out.getTotalSize() - 4;
}
public final int serialize(int offset, byte[] data) {
LittleEndianOutput leo = new LittleEndianByteArrayOutputStream(data, offset);
ContinuableRecordOutput out = new ContinuableRecordOutput(leo, getSid());
serialize(out);
// terminate() back-fills the 'ushort size' field of the last record written
out.terminate();
return out.getTotalSize();
}
}

+ 257
- 0
src/java/org/apache/poi/hssf/record/cont/ContinuableRecordOutput.java Visa fil

@@ -0,0 +1,257 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.cont;
import org.apache.poi.hssf.record.ContinueRecord;
import org.apache.poi.util.DelayableLittleEndianOutput;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/**
 * An augmented {@link LittleEndianOutput} used for serialization of {@link ContinuableRecord}s.
 * This class keeps track of how much remaining space is available in the current BIFF record and
 * can start new {@link ContinueRecord}s as required.
 *
 * @author Josh Micich
 */
public final class ContinuableRecordOutput implements LittleEndianOutput {
// the underlying stream; also needed to start each new ContinueRecord
private final LittleEndianOutput _out;
// output for the record currently being written (replaced on each writeContinue)
private UnknownLengthRecordOutput _ulrOutput;
// combined size (including headers) of all records already terminated
private int _totalPreviousRecordsSize;
ContinuableRecordOutput(LittleEndianOutput out, int sid) {
_ulrOutput = new UnknownLengthRecordOutput(out, sid);
_out = out;
_totalPreviousRecordsSize = 0;
}
/**
 * Creates an instance backed by a no-op stream, for counting output size without
 * actually writing anything (see {@link ContinuableRecord#getDataSize()}).
 */
public static ContinuableRecordOutput createForCountingOnly() {
return new ContinuableRecordOutput(NOPOutput, -777); // fake sid
}
/**
 * @return total number of bytes written so far (including all BIFF headers)
 */
public int getTotalSize() {
return _totalPreviousRecordsSize + _ulrOutput.getTotalSize();
}
/**
 * Terminates the last record (also updates its 'ushort size' field)
 */
void terminate() {
_ulrOutput.terminate();
}
/**
 * @return number of remaining bytes of space in current record
 */
public int getAvailableSpace() {
return _ulrOutput.getAvailableSpace();
}
/**
 * Terminates the current record and starts a new {@link ContinueRecord} (regardless
 * of how much space is still available in the current record).
 */
public void writeContinue() {
// note: getTotalSize() must be read after terminate() so the header is included
_ulrOutput.terminate();
_totalPreviousRecordsSize += _ulrOutput.getTotalSize();
_ulrOutput = new UnknownLengthRecordOutput(_out, ContinueRecord.sid);
}
public void writeContinueIfRequired(int requiredContinuousSize) {
if (_ulrOutput.getAvailableSpace() < requiredContinuousSize) {
writeContinue();
}
}
/**
 * Writes the 'optionFlags' byte and encoded character data of a unicode string. This includes:
 * <ul>
 * <li>byte optionFlags</li>
 * <li>encoded character data (in "ISO-8859-1" or "UTF-16LE" encoding)</li>
 * </ul>
 *
 * Notes:
 * <ul>
 * <li>The value of the 'is16bitEncoded' flag is determined by the actual character data
 * of <tt>text</tt></li>
 * <li>The string options flag is never separated (by a {@link ContinueRecord}) from the
 * first chunk of character data it refers to.</li>
 * <li>The 'ushort length' field is assumed to have been explicitly written earlier. Hence,
 * there may be an intervening {@link ContinueRecord}</li>
 * </ul>
 */
public void writeStringData(String text) {
boolean is16bitEncoded = StringUtil.hasMultibyte(text);
// calculate total size of the header and first encoded char
int keepTogetherSize = 1 + 1; // byte optionFlags, at least one character byte
int optionFlags = 0x00;
if (is16bitEncoded) {
optionFlags |= 0x01;
keepTogetherSize += 1; // one extra byte for first char
}
writeContinueIfRequired(keepTogetherSize);
writeByte(optionFlags);
writeCharacterData(text, is16bitEncoded);
}
/**
 * Writes a unicode string complete with header and character data. This includes:
 * <ul>
 * <li>ushort length</li>
 * <li>byte optionFlags</li>
 * <li>ushort numberOfRichTextRuns (optional)</li>
 * <li>ushort extendedDataSize (optional)</li>
 * <li>encoded character data (in "ISO-8859-1" or "UTF-16LE" encoding)</li>
 * </ul>
 *
 * The following bits of the 'optionFlags' byte will be set as appropriate:
 * <table border='1'>
 * <tr><th>Mask</th><th>Description</th></tr>
 * <tr><td>0x01</td><td>is16bitEncoded</td></tr>
 * <tr><td>0x04</td><td>hasExtendedData</td></tr>
 * <tr><td>0x08</td><td>isRichText</td></tr>
 * </table>
 * Notes:
 * <ul>
 * <li>The value of the 'is16bitEncoded' flag is determined by the actual character data
 * of <tt>text</tt></li>
 * <li>The string header fields are never separated (by a {@link ContinueRecord}) from the
 * first chunk of character data (i.e. the first character is always encoded in the same
 * record as the string header).</li>
 * </ul>
 */
public void writeString(String text, int numberOfRichTextRuns, int extendedDataSize) {
boolean is16bitEncoded = StringUtil.hasMultibyte(text);
// calculate total size of the header and first encoded char
int keepTogetherSize = 2 + 1 + 1; // ushort len, byte optionFlags, at least one character byte
int optionFlags = 0x00;
if (is16bitEncoded) {
optionFlags |= 0x01;
keepTogetherSize += 1; // one extra byte for first char
}
if (numberOfRichTextRuns > 0) {
optionFlags |= 0x08;
keepTogetherSize += 2;
}
if (extendedDataSize > 0) {
optionFlags |= 0x04;
keepTogetherSize += 4;
}
writeContinueIfRequired(keepTogetherSize);
writeShort(text.length());
writeByte(optionFlags);
if (numberOfRichTextRuns > 0) {
writeShort(numberOfRichTextRuns);
}
if (extendedDataSize > 0) {
writeInt(extendedDataSize);
}
writeCharacterData(text, is16bitEncoded);
}
// Writes the raw character data, spilling into ContinueRecords as needed.  Each
// continued chunk is prefixed with its own 1-byte encoding flag (0x01 = 16-bit).
private void writeCharacterData(String text, boolean is16bitEncoded) {
int nChars = text.length();
int i=0;
if (is16bitEncoded) {
while(true) {
// 2 bytes per char; a character is never split across a record boundary
int nWritableChars = Math.min(nChars-i, _ulrOutput.getAvailableSpace() / 2);
for ( ; nWritableChars > 0; nWritableChars--) {
_ulrOutput.writeShort(text.charAt(i++));
}
if (i >= nChars) {
break;
}
writeContinue();
writeByte(0x01);
}
} else {
while(true) {
// 1 byte per char
int nWritableChars = Math.min(nChars-i, _ulrOutput.getAvailableSpace() / 1);
for ( ; nWritableChars > 0; nWritableChars--) {
_ulrOutput.writeByte(text.charAt(i++));
}
if (i >= nChars) {
break;
}
writeContinue();
writeByte(0x00);
}
}
}
// The plain LittleEndianOutput methods below all start a ContinueRecord first if the
// value would not fit in the current record.
public void write(byte[] b) {
writeContinueIfRequired(b.length);
_ulrOutput.write(b);
}
public void write(byte[] b, int offset, int len) {
writeContinueIfRequired(len);
_ulrOutput.write(b, offset, len);
}
public void writeByte(int v) {
writeContinueIfRequired(1);
_ulrOutput.writeByte(v);
}
public void writeDouble(double v) {
writeContinueIfRequired(8);
_ulrOutput.writeDouble(v);
}
public void writeInt(int v) {
writeContinueIfRequired(4);
_ulrOutput.writeInt(v);
}
public void writeLong(long v) {
writeContinueIfRequired(8);
_ulrOutput.writeLong(v);
}
public void writeShort(int v) {
writeContinueIfRequired(2);
_ulrOutput.writeShort(v);
}
/**
 * Allows optimised usage of {@link ContinuableRecordOutput} for sizing purposes only.
 */
private static final LittleEndianOutput NOPOutput = new DelayableLittleEndianOutput() {
public LittleEndianOutput createDelayedOutput(int size) {
return this;
}
public void write(byte[] b) {
// does nothing
}
public void write(byte[] b, int offset, int len) {
// does nothing
}
public void writeByte(int v) {
// does nothing
}
public void writeDouble(double v) {
// does nothing
}
public void writeInt(int v) {
// does nothing
}
public void writeLong(long v) {
// does nothing
}
public void writeShort(int v) {
// does nothing
}
};
}

+ 114
- 0
src/java/org/apache/poi/hssf/record/cont/UnknownLengthRecordOutput.java Visa fil

@@ -0,0 +1,114 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.cont;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.util.DelayableLittleEndianOutput;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutput;
/**
 * Allows the writing of BIFF records when the 'ushort size' header field is not known in advance.
 * When the client is finished writing data, it calls {@link #terminate()}, at which point this
 * class updates the 'ushort size' with its final value.
 *
 * @author Josh Micich
 */
final class UnknownLengthRecordOutput implements LittleEndianOutput {
private static final int MAX_DATA_SIZE = RecordInputStream.MAX_RECORD_DATA_SIZE;
private final LittleEndianOutput _originalOut;
/** for writing the 'ushort size' field once its value is known */
private final LittleEndianOutput _dataSizeOutput;
// temporary buffer for record data; null when the delayed-output optimisation applies
private final byte[] _byteBuffer;
// destination for record data; set to null once terminate() has been called
private LittleEndianOutput _out;
// number of data bytes written so far (excludes the 4-byte header)
private int _size;
public UnknownLengthRecordOutput(LittleEndianOutput out, int sid) {
_originalOut = out;
out.writeShort(sid);
if (out instanceof DelayableLittleEndianOutput) {
// optimisation: reserve 2 bytes for the size field now and write data straight through
DelayableLittleEndianOutput dleo = (DelayableLittleEndianOutput) out;
_dataSizeOutput = dleo.createDelayedOutput(2);
_byteBuffer = null;
_out = out;
} else {
// otherwise temporarily write all subsequent data to a buffer
_dataSizeOutput = out;
_byteBuffer = new byte[RecordInputStream.MAX_RECORD_DATA_SIZE];
_out = new LittleEndianByteArrayOutputStream(_byteBuffer, 0);
}
}
/**
 * includes 4 byte header
 */
public int getTotalSize() {
return 4 + _size;
}
public int getAvailableSpace() {
if (_out == null) {
throw new IllegalStateException("Record already terminated");
}
return MAX_DATA_SIZE - _size;
}
/**
 * Finishes writing the current record and updates 'ushort size' field.<br/>
 * After this method is called, only {@link #getTotalSize()} may be called.
 */
public void terminate() {
if (_out == null) {
throw new IllegalStateException("Record already terminated");
}
// back-fill the size field (a delayed output in the optimised case, else the
// position immediately after the sid in the buffered case)
_dataSizeOutput.writeShort(_size);
if (_byteBuffer != null) {
// buffered case: flush the accumulated data after the now-complete header
_originalOut.write(_byteBuffer, 0, _size);
_out = null;
return;
}
_out = null;
}
public void write(byte[] b) {
_out.write(b);
_size += b.length;
}
public void write(byte[] b, int offset, int len) {
_out.write(b, offset, len);
_size += len;
}
public void writeByte(int v) {
_out.writeByte(v);
_size += 1;
}
public void writeDouble(double v) {
_out.writeDouble(v);
_size += 8;
}
public void writeInt(int v) {
_out.writeInt(v);
_size += 4;
}
public void writeLong(long v) {
_out.writeLong(v);
_size += 8;
}
public void writeShort(int v) {
_out.writeShort(v);
_size += 2;
}
}

+ 2
- 2
src/java/org/apache/poi/hssf/record/formula/ArrayPtg.java Visa fil

@@ -206,8 +206,8 @@ public final class ArrayPtg extends Ptg {
if (o == null) {
throw new RuntimeException("Array item cannot be null");
}
if (o instanceof UnicodeString) {
return "\"" + ((UnicodeString)o).getString() + "\"";
if (o instanceof String) {
return "\"" + (String)o + "\"";
}
if (o instanceof Double) {
return ((Double)o).toString();

+ 38
- 32
src/java/org/apache/poi/hssf/usermodel/HSSFCell.java Visa fil

@@ -43,7 +43,6 @@ import org.apache.poi.hssf.record.NumberRecord;
import org.apache.poi.hssf.record.ObjRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordBase;
import org.apache.poi.hssf.record.StringRecord;
import org.apache.poi.hssf.record.SubRecord;
import org.apache.poi.hssf.record.TextObjectRecord;
import org.apache.poi.hssf.record.UnicodeString;
@@ -257,7 +256,7 @@ public class HSSFCell implements Cell {
}
public int getColumnIndex() {
return record.getColumn() & 0xFFFF;
return record.getColumn() & 0xFFFF;
}

/**
@@ -336,38 +335,23 @@ public class HSSFCell implements Cell {
break;

case CELL_TYPE_STRING :
LabelSSTRecord lrec = null;
LabelSSTRecord lrec;

if (cellType != this.cellType)
{
if (cellType == this.cellType) {
lrec = (LabelSSTRecord) record;
} else {
lrec = new LabelSSTRecord();
lrec.setColumn(col);
lrec.setRow(row);
lrec.setXFIndex(styleIndex);
}
else
{
lrec = ( LabelSSTRecord ) record;
}
lrec.setColumn(col);
lrec.setRow(row);
lrec.setXFIndex(styleIndex);
if (setValue)
{
if ((getStringCellValue() != null)
&& (!getStringCellValue().equals("")))
{
int sst = 0;

UnicodeString str = getRichStringCellValue().getUnicodeString();
//jmh if (encoding == ENCODING_COMPRESSED_UNICODE)
//jmh {
// jmh str.setCompressedUnicode();
// jmh } else if (encoding == ENCODING_UTF_16)
// jmh {
// jmh str.setUncompressedUnicode();
// jmh }
sst = book.getWorkbook().addSSTString(str);
lrec.setSSTIndex(sst);
getRichStringCellValue().setUnicodeString(book.getWorkbook().getSSTString(sst));
}
if (setValue) {
String str = convertCellValueToString();
int sstIndex = book.getWorkbook().addSSTString(new UnicodeString(str));
lrec.setSSTIndex(sstIndex);
UnicodeString us = book.getWorkbook().getSSTString(sstIndex);
stringValue = new HSSFRichTextString();
stringValue.setUnicodeString(us);
}
record = lrec;
break;
@@ -782,7 +766,9 @@ public class HSSFCell implements Cell {
case CELL_TYPE_BOOLEAN:
return (( BoolErrRecord ) record).getBooleanValue();
case CELL_TYPE_STRING:
return Boolean.valueOf(((StringRecord)record).getString()).booleanValue();
int sstIndex = ((LabelSSTRecord)record).getSSTIndex();
String text = book.getWorkbook().getSSTString(sstIndex).getString();
return Boolean.valueOf(text).booleanValue();
case CELL_TYPE_NUMERIC:
return ((NumberRecord)record).getValue() != 0;

@@ -796,6 +782,26 @@ public class HSSFCell implements Cell {
}
throw new RuntimeException("Unexpected cell type (" + cellType + ")");
}
private String convertCellValueToString() {

switch (cellType) {
case CELL_TYPE_BLANK:
return "";
case CELL_TYPE_BOOLEAN:
return ((BoolErrRecord) record).getBooleanValue() ? "TRUE" : "FALSE";
case CELL_TYPE_STRING:
int sstIndex = ((LabelSSTRecord)record).getSSTIndex();
return book.getWorkbook().getSSTString(sstIndex).getString();
case CELL_TYPE_NUMERIC:
return String.valueOf(((NumberRecord)record).getValue());
case CELL_TYPE_ERROR:
return HSSFErrorConstants.getText(((BoolErrRecord) record).getErrorValue());
case CELL_TYPE_FORMULA:
// should really evaluate, but HSSFCell can't call HSSFFormulaEvaluator
return "";
}
throw new RuntimeException("Unexpected cell type (" + cellType + ")");
}

/**
* get the value of the cell as a boolean. For strings, numbers, and errors, we throw an exception.

+ 34
- 0
src/java/org/apache/poi/util/DelayableLittleEndianOutput.java Visa fil

@@ -0,0 +1,34 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
/**
 * Implementors of this interface allow client code to 'delay' writing to a certain section of a
 * data output stream.<br/>
 * A typical application is for writing BIFF records when the size is not known until well after
 * the header has been written. The client code can call {@link #createDelayedOutput(int)}
 * to reserve two bytes of the output for the 'ushort size' header field. The delayed output can
 * be written at any stage.
 *
 * @author Josh Micich
 */
public interface DelayableLittleEndianOutput extends LittleEndianOutput {
/**
 * Creates an output stream intended for outputting a sequence of <tt>size</tt> bytes.
 * The returned output writes into the reserved region; the enclosing stream's write
 * position advances past that region immediately.
 */
LittleEndianOutput createDelayedOutput(int size);
}

+ 7
- 1
src/java/org/apache/poi/util/LittleEndianByteArrayOutputStream.java Visa fil

@@ -24,7 +24,7 @@ package org.apache.poi.util;
*
* @author Josh Micich
*/
public final class LittleEndianByteArrayOutputStream implements LittleEndianOutput {
public final class LittleEndianByteArrayOutputStream implements LittleEndianOutput, DelayableLittleEndianOutput {
private final byte[] _buf;
private final int _endIndex;
private int _writeIndex;
@@ -89,4 +89,10 @@ public final class LittleEndianByteArrayOutputStream implements LittleEndianOutp
public int getWriteIndex() {
return _writeIndex;
}
public LittleEndianOutput createDelayedOutput(int size) {
checkPosition(size);
LittleEndianOutput result = new LittleEndianByteArrayOutputStream(_buf, _writeIndex, _writeIndex+size);
_writeIndex += size;
return result;
}
}

+ 9
- 0
src/java/org/apache/poi/util/StringUtil.java Visa fil

@@ -162,6 +162,15 @@ public class StringUtil {
}
}

/**
* @return the number of bytes that would be written by {@link #writeUnicodeString(LittleEndianOutput, String)}
*/
public static int getEncodedSize(String value) {
int result = 2 + 1;
result += value.length() * (StringUtil.hasMultibyte(value) ? 2 : 1);
return result;
}

/**
* Takes a unicode (java) string, and returns it as 8 bit data (in ISO-8859-1
* codepage).

+ 6
- 2
src/scratchpad/src/org/apache/poi/hslf/model/Picture.java Visa fil

@@ -196,10 +196,14 @@ public class Picture extends SimpleShape {
Document doc = ppt.getDocumentRecord();
EscherContainerRecord dggContainer = doc.getPPDrawingGroup().getDggContainer();
EscherContainerRecord bstore = (EscherContainerRecord)Shape.getEscherChild(dggContainer, EscherContainerRecord.BSTORE_CONTAINER);

if(bstore == null) {
logger.log(POILogger.DEBUG, "EscherContainerRecord.BSTORE_CONTAINER was not found ");
return null;
}
List lst = bstore.getChildRecords();
int idx = getPictureIndex();
if (idx == 0){
logger.log(POILogger.DEBUG, "picture index was not found, returning ");
return null;
} else {
return (EscherBSERecord)lst.get(idx-1);
@@ -263,7 +267,7 @@ public class Picture extends SimpleShape {
ShapePainter.paint(this, graphics);

PictureData data = getPictureData();
data.draw(graphics, this);
if(data != null) data.draw(graphics, this);

graphics.setTransform(at);
}

+ 2
- 2
src/scratchpad/src/org/apache/poi/hslf/model/TableCell.java Visa fil

@@ -56,8 +56,8 @@ public class TableCell extends TextBox {
super(parent);
setShapeType(ShapeTypes.Rectangle);
_txtrun.setRunType(TextHeaderAtom.HALF_BODY_TYPE);
_txtrun.getRichTextRuns()[0].setFlag(false, 0, false);
//_txtrun.setRunType(TextHeaderAtom.HALF_BODY_TYPE);
//_txtrun.getRichTextRuns()[0].setFlag(false, 0, false);
}
protected EscherContainerRecord createSpContainer(boolean isChild){

+ 23
- 0
src/scratchpad/testcases/org/apache/poi/hslf/model/TestPicture.java Visa fil

@@ -20,9 +20,12 @@ import junit.framework.*;
import java.io.FileOutputStream;
import java.io.File;
import java.io.IOException;
import java.awt.*;
import java.awt.image.BufferedImage;
import org.apache.poi.hslf.usermodel.SlideShow;
import org.apache.poi.hslf.usermodel.PictureData;
import org.apache.poi.hslf.HSLFSlideShow;
import org.apache.poi.ddf.EscherBSERecord;
@@ -70,4 +73,24 @@ public class TestPicture extends TestCase {
}
/**
* Picture#getEscherBSERecord threw NullPointerException if EscherContainerRecord.BSTORE_CONTAINER
* was not found. The correct behaviour is to return null.
*/
public void test46122() throws IOException {
SlideShow ppt = new SlideShow();
Slide slide = ppt.createSlide();
Picture pict = new Picture(-1); //index to non-existing picture data
pict.setSheet(slide);
PictureData data = pict.getPictureData();
assertNull(data);
BufferedImage img = new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);
Graphics2D graphics = img.createGraphics();
pict.draw(graphics);
assertTrue("no errors rendering Picture with null data", true);
}
}

+ 5
- 0
src/scratchpad/testcases/org/apache/poi/hslf/model/TestTable.java Visa fil

@@ -24,6 +24,7 @@ import java.awt.geom.Rectangle2D;
import org.apache.poi.hslf.usermodel.SlideShow;
import org.apache.poi.hslf.HSLFSlideShow;
import org.apache.poi.hslf.record.TextHeaderAtom;
/**
* Test <code>Table</code> object.
@@ -43,6 +44,10 @@ public class TestTable extends TestCase {
Table tbl = new Table(2, 5);
slide.addShape(tbl);
TableCell cell = tbl.getCell(0, 0);
//table cells have type=TextHeaderAtom.OTHER_TYPE, see bug #46033
assertEquals(TextHeaderAtom.OTHER_TYPE, cell.getTextRun().getRunType());
assertTrue(slide.getShapes()[0] instanceof Table);
Table tbl2 = (Table)slide.getShapes()[0];
assertEquals(tbl.getNumberOfColumns(), tbl2.getNumberOfColumns());

+ 23
- 26
src/testcases/org/apache/poi/hssf/record/TestRecordFactory.java Visa fil

@@ -48,7 +48,6 @@ public final class TestRecordFactory extends TestCase {
byte[] data = {
0, 6, 5, 0, -2, 28, -51, 7, -55, 64, 0, 0, 6, 1, 0, 0
};
short size = 16;
Record[] record = RecordFactory.createRecord(TestcaseRecordInputStream.create(recType, data));

assertEquals(BOFRecord.class.getName(),
@@ -64,7 +63,6 @@ public final class TestRecordFactory extends TestCase {
assertEquals(5, bofRecord.getType());
assertEquals(1536, bofRecord.getVersion());
recType = MMSRecord.sid;
size = 2;
data = new byte[]
{
0, 0
@@ -93,7 +91,6 @@ public final class TestRecordFactory extends TestCase {
byte[] data = {
0, 0, 0, 0, 21, 0, 0, 0, 0, 0
};
short size = 10;
Record[] record = RecordFactory.createRecord(TestcaseRecordInputStream.create(recType, data));

assertEquals(NumberRecord.class.getName(),
@@ -154,34 +151,34 @@ public final class TestRecordFactory extends TestCase {
*/
public void testMixedContinue() throws Exception {
/**
* Taken from a real test sample file 39512.xls. See Bug 39512 for details.
* Adapted from a real test sample file 39512.xls (Offset 0x4854).
* See Bug 39512 for details.
*/
String dump =
//OBJ
"5D, 00, 48, 00, 15, 00, 12, 00, 0C, 00, 3C, 00, 11, 00, A0, 2E, 03, 01, CC, 42, " +
"CF, 00, 00, 00, 00, 00, 0A, 00, 0C, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, " +
"03, 00, 0B, 00, 06, 00, 28, 01, 03, 01, 00, 00, 12, 00, 08, 00, 00, 00, 00, 00, " +
"00, 00, 03, 00, 11, 00, 04, 00, 3D, 00, 00, 00, 00, 00, 00, 00, " +
"5D 00 48 00 15 00 12 00 0C 00 3C 00 11 00 A0 2E 03 01 CC 42 " +
"CF 00 00 00 00 00 0A 00 0C 00 00 00 00 00 00 00 00 00 00 00 " +
"03 00 0B 00 06 00 28 01 03 01 00 00 12 00 08 00 00 00 00 00 " +
"00 00 03 00 11 00 04 00 3D 00 00 00 00 00 00 00 " +
//MSODRAWING
"EC, 00, 08, 00, 00, 00, 0D, F0, 00, 00, 00, 00, " +
//TXO
"B6, 01, 12, 00, 22, 02, 00, 00, 00, 00, 00, 00, 00, 00, 10, 00, 10, 00, 00, 00, " +
"00, 00, 3C, 00, 21, 00, 01, 4F, 00, 70, 00, 74, 00, 69, 00, 6F, 00, 6E, 00, 20, " +
"00, 42, 00, 75, 00, 74, 00, 74, 00, 6F, 00, 6E, 00, 20, 00, 33, 00, 39, 00, 3C, " +
"00, 10, 00, 00, 00, 05, 00, 00, 00, 00, 00, 10, 00, 00, 00, 00, 00, 00, 00, " +
//CONTINUE
"3C, 00, 7E, 00, 0F, 00, 04, F0, 7E, 00, 00, 00, 92, 0C, 0A, F0, 08, 00, 00, 00, " +
"3D, 04, 00, 00, 00, 0A, 00, 00, A3, 00, 0B, F0, 3C, 00, 00, 00, 7F, 00, 00, 01, " +
"00, 01, 80, 00, 8C, 01, 03, 01, 85, 00, 01, 00, 00, 00, 8B, 00, 02, 00, 00, 00, " +
"BF, 00, 08, 00, 1A, 00, 7F, 01, 29, 00, 29, 00, 81, 01, 41, 00, 00, 08, BF, 01, " +
"00, 00, 10, 00, C0, 01, 40, 00, 00, 08, FF, 01, 00, 00, 08, 00, 00, 00, 10, F0, " +
"12, 00, 00, 00, 02, 00, 02, 00, A0, 03, 18, 00, B5, 00, 04, 00, 30, 02, 1A, 00, " +
"00, 00, 00, 00, 11, F0, 00, 00, 00, 00, " +
"EC 00 08 00 00 00 0D F0 00 00 00 00 " +
//TXO (and 2 trailing CONTINUE records)
"B6 01 12 00 22 02 00 00 00 00 00 00 00 00 10 00 10 00 00 00 00 00 " +
"3C 00 11 00 00 4F 70 74 69 6F 6E 20 42 75 74 74 6F 6E 20 33 39 " +
"3C 00 10 00 00 00 05 00 00 00 00 00 10 00 00 00 00 00 00 00 " +
// another CONTINUE
"3C 00 7E 00 0F 00 04 F0 7E 00 00 00 92 0C 0A F0 08 00 00 00 " +
"3D 04 00 00 00 0A 00 00 A3 00 0B F0 3C 00 00 00 7F 00 00 01 " +
"00 01 80 00 8C 01 03 01 85 00 01 00 00 00 8B 00 02 00 00 00 " +
"BF 00 08 00 1A 00 7F 01 29 00 29 00 81 01 41 00 00 08 BF 01 " +
"00 00 10 00 C0 01 40 00 00 08 FF 01 00 00 08 00 00 00 10 F0 " +
"12 00 00 00 02 00 02 00 A0 03 18 00 B5 00 04 00 30 02 1A 00 " +
"00 00 00 00 11 F0 00 00 00 00 " +
//OBJ
"5D, 00, 48, 00, 15, 00, 12, 00, 0C, 00, 3D, 00, 11, 00, 8C, 01, 03, 01, C8, 59, CF, 00, 00, " +
"00, 00, 00, 0A, 00, 0C, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 03, 00, 0B, 00, 06, 00, " +
"7C, 16, 03, 01, 00, 00, 12, 00, 08, 00, 00, 00, 00, 00, 00, 00, 03, 00, 11, 00, 04, 00, 01, " +
"00, 00, 00, 00, 00, 00, 00";
"5D 00 48 00 15 00 12 00 0C 00 3D 00 11 00 8C 01 03 01 C8 59 CF 00 00 " +
"00 00 00 0A 00 0C 00 00 00 00 00 00 00 00 00 00 00 03 00 0B 00 06 00 " +
"7C 16 03 01 00 00 12 00 08 00 00 00 00 00 00 00 03 00 11 00 04 00 01 " +
"00 00 00 00 00 00 00";
byte[] data = HexRead.readFromString(dump);

List records = RecordFactory.createRecords(new ByteArrayInputStream(data));

+ 91
- 119
src/testcases/org/apache/poi/hssf/record/TestSSTRecordSizeCalculator.java Visa fil

@@ -1,4 +1,3 @@

/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@@ -15,133 +14,106 @@
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hssf.record;

import junit.framework.TestCase;

import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.IntMapper;

/**
* Tests that records size calculates correctly.
*
*
* @author Glen Stampoultzis (glens at apache.org)
*/
public class TestSSTRecordSizeCalculator
extends TestCase
{
private static final String SMALL_STRING = "Small string";
private static final int COMPRESSED_PLAIN_STRING_OVERHEAD = 3;
// private List recordLengths;
private IntMapper strings;
private static final int OPTION_FIELD_SIZE = 1;

public TestSSTRecordSizeCalculator( String s )
{
super( s );
}

public void testBasic()
throws Exception
{
strings.add(makeUnicodeString(SMALL_STRING));
SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(strings);
assertEquals(SSTRecord.SST_RECORD_OVERHEAD + COMPRESSED_PLAIN_STRING_OVERHEAD + SMALL_STRING.length(),
calculator.getRecordSize());
}

public void testBigStringAcrossUnicode()
throws Exception
{
String bigString = new String(new char[SSTRecord.MAX_DATA_SPACE + 100]);
strings.add(makeUnicodeString(bigString));
SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(strings);
assertEquals(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ SSTRecord.MAX_DATA_SPACE
+ SSTRecord.STD_RECORD_OVERHEAD
+ OPTION_FIELD_SIZE
+ 100,
calculator.getRecordSize());
}

public void testPerfectFit()
throws Exception
{
String perfectFit = new String(new char[SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD]);
strings.add(makeUnicodeString(perfectFit));
SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(strings);
assertEquals(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ perfectFit.length(),
calculator.getRecordSize());
}

public void testJustOversized()
throws Exception
{
String tooBig = new String(new char[SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD + 1]);
strings.add(makeUnicodeString(tooBig));
SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(strings);
assertEquals(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ tooBig.length() - 1
// continue record
+ SSTRecord.STD_RECORD_OVERHEAD
+ OPTION_FIELD_SIZE
+ 1,
calculator.getRecordSize());

}

public void testSecondStringStartsOnNewContinuation()
throws Exception
{
String perfectFit = new String(new char[SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD]);
strings.add(makeUnicodeString(perfectFit));
strings.add(makeUnicodeString(SMALL_STRING));
SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(strings);
assertEquals(SSTRecord.SST_RECORD_OVERHEAD
+ SSTRecord.MAX_DATA_SPACE
// second string
+ SSTRecord.STD_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ SMALL_STRING.length(),
calculator.getRecordSize());
}

public void testHeaderCrossesNormalContinuePoint()
throws Exception
{
String almostPerfectFit = new String(new char[SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD - 2]);
strings.add(makeUnicodeString(almostPerfectFit));
String oneCharString = new String(new char[1]);
strings.add(makeUnicodeString(oneCharString));
SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(strings);
assertEquals(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ almostPerfectFit.length()
// second string
+ SSTRecord.STD_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ oneCharString.length(),
calculator.getRecordSize());

}


public void setUp()
{
strings = new IntMapper();
}


private UnicodeString makeUnicodeString( String s )
{
UnicodeString st = new UnicodeString(s);
st.setOptionFlags((byte)0);
return st;
}

public final class TestSSTRecordSizeCalculator extends TestCase {
private static final String SMALL_STRING = "Small string";
private static final int COMPRESSED_PLAIN_STRING_OVERHEAD = 3;
private static final int OPTION_FIELD_SIZE = 1;
private final IntMapper strings = new IntMapper();

private void confirmSize(int expectedSize) {
ContinuableRecordOutput cro = ContinuableRecordOutput.createForCountingOnly();
SSTSerializer ss = new SSTSerializer(strings, 0, 0);
ss.serialize(cro);
assertEquals(expectedSize, cro.getTotalSize());
}

public void testBasic() {
strings.add(makeUnicodeString(SMALL_STRING));
confirmSize(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ SMALL_STRING.length());
}

public void testBigStringAcrossUnicode() {
int bigString = SSTRecord.MAX_DATA_SPACE + 100;
strings.add(makeUnicodeString(bigString));
confirmSize(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ SSTRecord.MAX_DATA_SPACE
+ SSTRecord.STD_RECORD_OVERHEAD
+ OPTION_FIELD_SIZE
+ 100);
}

public void testPerfectFit() {
int perfectFit = SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD;
strings.add(makeUnicodeString(perfectFit));
confirmSize(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ perfectFit);
}

public void testJustOversized() {
int tooBig = SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD + 1;
strings.add(makeUnicodeString(tooBig));
confirmSize(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ tooBig - 1
// continue record
+ SSTRecord.STD_RECORD_OVERHEAD
+ OPTION_FIELD_SIZE + 1);

}

public void testSecondStringStartsOnNewContinuation() {
int perfectFit = SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD;
strings.add(makeUnicodeString(perfectFit));
strings.add(makeUnicodeString(SMALL_STRING));
confirmSize(SSTRecord.SST_RECORD_OVERHEAD
+ SSTRecord.MAX_DATA_SPACE
// second string
+ SSTRecord.STD_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ SMALL_STRING.length());
}

public void testHeaderCrossesNormalContinuePoint() {
int almostPerfectFit = SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD - 2;
strings.add(makeUnicodeString(almostPerfectFit));
String oneCharString = new String(new char[1]);
strings.add(makeUnicodeString(oneCharString));
confirmSize(SSTRecord.SST_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ almostPerfectFit
// second string
+ SSTRecord.STD_RECORD_OVERHEAD
+ COMPRESSED_PLAIN_STRING_OVERHEAD
+ oneCharString.length());

}
private static UnicodeString makeUnicodeString(int size) {
String s = new String(new char[size]);
return makeUnicodeString(s);
}

private static UnicodeString makeUnicodeString(String s) {
UnicodeString st = new UnicodeString(s);
st.setOptionFlags((byte) 0);
return st;
}
}

+ 68
- 25
src/testcases/org/apache/poi/hssf/record/TestStringRecord.java Visa fil

@@ -18,6 +18,12 @@
package org.apache.poi.hssf.record;


import org.apache.poi.util.HexRead;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianByteArrayInputStream;
import org.apache.poi.util.LittleEndianInput;

import junit.framework.AssertionFailedError;
import junit.framework.TestCase;

/**
@@ -28,29 +34,66 @@ import junit.framework.TestCase;
* @author Glen Stampoultzis (glens at apache.org)
*/
public final class TestStringRecord extends TestCase {
byte[] data = new byte[] {
(byte)0x0B,(byte)0x00, // length
(byte)0x00, // option
// string
(byte)0x46,(byte)0x61,(byte)0x68,(byte)0x72,(byte)0x7A,(byte)0x65,(byte)0x75,(byte)0x67,(byte)0x74,(byte)0x79,(byte)0x70
};

public void testLoad() {

StringRecord record = new StringRecord(TestcaseRecordInputStream.create(0x207, data));
assertEquals( "Fahrzeugtyp", record.getString());

assertEquals( 18, record.getRecordSize() );
}

public void testStore()
{
StringRecord record = new StringRecord();
record.setString("Fahrzeugtyp");

byte [] recordBytes = record.serialize();
assertEquals(recordBytes.length - 4, data.length);
for (int i = 0; i < data.length; i++)
assertEquals("At offset " + i, data[i], recordBytes[i+4]);
}
private static final byte[] data = HexRead.readFromString(
"0B 00 " + // length
"00 " + // option
// string
"46 61 68 72 7A 65 75 67 74 79 70"
);

public void testLoad() {

StringRecord record = new StringRecord(TestcaseRecordInputStream.create(0x207, data));
assertEquals( "Fahrzeugtyp", record.getString());

assertEquals( 18, record.getRecordSize() );
}

public void testStore() {
StringRecord record = new StringRecord();
record.setString("Fahrzeugtyp");

byte [] recordBytes = record.serialize();
assertEquals(recordBytes.length - 4, data.length);
for (int i = 0; i < data.length; i++)
assertEquals("At offset " + i, data[i], recordBytes[i+4]);
}
public void testContinue() {
int MAX_BIFF_DATA = RecordInputStream.MAX_RECORD_DATA_SIZE;
int TEXT_LEN = MAX_BIFF_DATA + 1000; // deliberately over-size
String textChunk = "ABCDEGGHIJKLMNOP"; // 16 chars
StringBuffer sb = new StringBuffer(16384);
while (sb.length() < TEXT_LEN) {
sb.append(textChunk);
}
sb.setLength(TEXT_LEN);

StringRecord sr = new StringRecord();
sr.setString(sb.toString());
byte[] ser = sr.serialize();
assertEquals(StringRecord.sid, LittleEndian.getUShort(ser, 0));
if (LittleEndian.getUShort(ser, 2) > MAX_BIFF_DATA) {
throw new AssertionFailedError(
"StringRecord should have been split with a continue record");
}
// Confirm expected size of first record, and ushort strLen.
assertEquals(MAX_BIFF_DATA, LittleEndian.getUShort(ser, 2));
assertEquals(TEXT_LEN, LittleEndian.getUShort(ser, 4));

// Confirm first few bytes of ContinueRecord
LittleEndianInput crIn = new LittleEndianByteArrayInputStream(ser, (MAX_BIFF_DATA + 4));
int nCharsInFirstRec = MAX_BIFF_DATA - (2 + 1); // strLen, optionFlags
int nCharsInSecondRec = TEXT_LEN - nCharsInFirstRec;
assertEquals(ContinueRecord.sid, crIn.readUShort());
assertEquals(1 + nCharsInSecondRec, crIn.readUShort());
assertEquals(0, crIn.readUByte());
assertEquals('N', crIn.readUByte());
assertEquals('O', crIn.readUByte());

// re-read and make sure string value is the same
RecordInputStream in = TestcaseRecordInputStream.create(ser);
StringRecord sr2 = new StringRecord(in);
assertEquals(sb.toString(), sr2.getString());
}
}

+ 5
- 8
src/testcases/org/apache/poi/hssf/record/TestSupBookRecord.java Visa fil

@@ -69,10 +69,10 @@ public final class TestSupBookRecord extends TestCase {
assertEquals( 34, record.getRecordSize() ); //sid+size+data
assertEquals("testURL", record.getURL());
UnicodeString[] sheetNames = record.getSheetNames();
String[] sheetNames = record.getSheetNames();
assertEquals(2, sheetNames.length);
assertEquals("Sheet1", sheetNames[0].getString());
assertEquals("Sheet2", sheetNames[1].getString());
assertEquals("Sheet1", sheetNames[0]);
assertEquals("Sheet2", sheetNames[1]);
}
/**
@@ -97,11 +97,8 @@ public final class TestSupBookRecord extends TestCase {
}
public void testStoreER() {
UnicodeString url = new UnicodeString("testURL");
UnicodeString[] sheetNames = {
new UnicodeString("Sheet1"),
new UnicodeString("Sheet2"),
};
String url = "testURL";
String[] sheetNames = { "Sheet1", "Sheet2", };
SupBookRecord record = SupBookRecord.createExternalReferences(url, sheetNames);

TestcaseRecordInputStream.confirmRecordEncoding(0x01AE, dataER, record.serialize());

+ 4
- 4
src/testcases/org/apache/poi/hssf/record/TestTextObjectBaseRecord.java Visa fil

@@ -44,9 +44,9 @@ public final class TestTextObjectBaseRecord extends TestCase {
"00 00" +
"00 00 " +
"3C 00 " + // ContinueRecord.sid
"05 00 " + // size 5
"01 " + // unicode uncompressed
"41 00 42 00 " + // 'AB'
"03 00 " + // size 3
"00 " + // unicode compressed
"41 42 " + // 'AB'
"3C 00 " + // ContinueRecord.sid
"10 00 " + // size 16
"00 00 18 00 00 00 00 00 " +
@@ -63,7 +63,7 @@ public final class TestTextObjectBaseRecord extends TestCase {
assertEquals(true, record.isTextLocked());
assertEquals(TextObjectRecord.TEXT_ORIENTATION_ROT_RIGHT, record.getTextOrientation());

assertEquals(51, record.getRecordSize() );
assertEquals(49, record.getRecordSize() );
}

public void testStore()

+ 32
- 34
src/testcases/org/apache/poi/hssf/record/TestTextObjectRecord.java Visa fil

@@ -37,16 +37,14 @@ import org.apache.poi.util.LittleEndian;
public final class TestTextObjectRecord extends TestCase {
private static final byte[] simpleData = HexRead.readFromString(
"B6 01 12 00 " +
"12 02 00 00 00 00 00 00" +
"00 00 0D 00 08 00 00 00" +
"00 00 " +
"3C 00 1B 00 " +
"01 48 00 65 00 6C 00 6C 00 6F 00 " +
"2C 00 20 00 57 00 6F 00 72 00 6C " +
"00 64 00 21 00 " +
"3C 00 08 " +
"00 0D 00 00 00 00 00 00 00"
"B6 01 12 00 " +
"12 02 00 00 00 00 00 00" +
"00 00 0D 00 08 00 00 00" +
"00 00 " +
"3C 00 0E 00 " +
"00 48 65 6C 6C 6F 2C 20 57 6F 72 6C 64 21 " +
"3C 00 08 " +
"00 0D 00 00 00 00 00 00 00"
);
@@ -92,12 +90,12 @@ public final class TestTextObjectRecord extends TestCase {
record.setStr(str);
byte [] ser = record.serialize();
int formatDataLen = LittleEndian.getUShort(ser, 16);
assertEquals("formatDataLength", 0, formatDataLen);
assertEquals(22, ser.length); // just the TXO record
//read again
RecordInputStream is = TestcaseRecordInputStream.create(ser);
record = new TextObjectRecord(is);
@@ -152,38 +150,38 @@ public final class TestTextObjectRecord extends TestCase {
byte[] cln = cloned.serialize();
assertTrue(Arrays.equals(src, cln));
}
/** similar to {@link #simpleData} but with link formula at end of TXO rec*/
/** similar to {@link #simpleData} but with link formula at end of TXO rec*/
private static final byte[] linkData = HexRead.readFromString(
"B6 01 " + // TextObjectRecord.sid
"1E 00 " + // size 18
"44 02 02 00 00 00 00 00" +
"00 00 " +
"02 00 " + // strLen 2
"10 00 " + // 16 bytes for 2 format runs
"00 00 00 00 " +
"B6 01 " + // TextObjectRecord.sid
"1E 00 " + // size 18
"44 02 02 00 00 00 00 00" +
"00 00 " +
"02 00 " + // strLen 2
"10 00 " + // 16 bytes for 2 format runs
"00 00 00 00 " +
"05 00 " + // formula size
"D4 F0 8A 03 " + // unknownInt
"24 01 00 13 C0 " + //tRef(T2)
"13 " + // ??
"3C 00 " + // ContinueRecord.sid
"05 00 " + // size 5
"01 " + // unicode uncompressed
"41 00 42 00 " + // 'AB'
"3C 00 " + // ContinueRecord.sid
"10 00 " + // size 16
"00 00 18 00 00 00 00 00 " +
"02 00 00 00 00 00 00 00 "
"3C 00 " + // ContinueRecord.sid
"03 00 " + // size 3
"00 " + // unicode compressed
"41 42 " + // 'AB'
"3C 00 " + // ContinueRecord.sid
"10 00 " + // size 16
"00 00 18 00 00 00 00 00 " +
"02 00 00 00 00 00 00 00 "
);
public void testLinkFormula() {
RecordInputStream is = new RecordInputStream(new ByteArrayInputStream(linkData));
is.nextRecord();
TextObjectRecord rec = new TextObjectRecord(is);
Ptg ptg = rec.getLinkRefPtg();
assertNotNull(ptg);
assertEquals(RefPtg.class, ptg.getClass());
@@ -193,6 +191,6 @@ public final class TestTextObjectRecord extends TestCase {
byte [] data2 = rec.serialize();
assertEquals(linkData.length, data2.length);
assertTrue(Arrays.equals(linkData, data2));
}
}
}

+ 65
- 53
src/testcases/org/apache/poi/hssf/record/TestUnicodeString.java Visa fil

@@ -15,115 +15,123 @@
limitations under the License.
==================================================================== */


package org.apache.poi.hssf.record;

import org.apache.poi.util.HexRead;

import junit.framework.TestCase;

import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;

/**
* Tests that records size calculates correctly.
* Tests that {@link UnicodeString} record size calculates correctly. The record size
* is used when serializing {@link SSTRecord}s.
*
* @author Jason Height (jheight at apache.org)
*/
public final class TestUnicodeString extends TestCase {
private static final int MAX_DATA_SIZE = RecordInputStream.MAX_RECORD_DATA_SIZE;

/** a 4 character string requiring 16 bit encoding */
private static final String STR_16_BIT = "A\u591A\u8A00\u8A9E";

private static void confirmSize(int expectedSize, UnicodeString s) {
confirmSize(expectedSize, s, 0);
}
/**
* Note - a value of zero for <tt>amountUsedInCurrentRecord</tt> would only ever occur just
* after a {@link ContinueRecord} had been started. In the initial {@link SSTRecord} this
* value starts at 8 (for the first {@link UnicodeString} written). In general, it can be
* any value between 0 and {@link #MAX_DATA_SIZE}
*/
private static void confirmSize(int expectedSize, UnicodeString s, int amountUsedInCurrentRecord) {
ContinuableRecordOutput out = ContinuableRecordOutput.createForCountingOnly();
out.writeContinue();
for(int i=amountUsedInCurrentRecord; i>0; i--) {
out.writeByte(0);
}
int size0 = out.getTotalSize();
s.serialize(out);
int size1 = out.getTotalSize();
int actualSize = size1-size0;
assertEquals(expectedSize, actualSize);
}

public void testSmallStringSize() {
//Test a basic string
UnicodeString s = makeUnicodeString("Test");
UnicodeString.UnicodeRecordStats stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(7, stats.recordSize);
confirmSize(7, s);

//Test a small string that is uncompressed
s = makeUnicodeString(STR_16_BIT);
s.setOptionFlags((byte)0x01);
stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(11, stats.recordSize);
confirmSize(11, s);

//Test a compressed small string that has rich text formatting
s.setString("Test");
s.setOptionFlags((byte)0x8);
UnicodeString.FormatRun r = new UnicodeString.FormatRun((short)0,(short)1);
s.addFormatRun(r);
UnicodeString.FormatRun r2 = new UnicodeString.FormatRun((short)2,(short)2);
s.addFormatRun(r2);
stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(17, stats.recordSize);
confirmSize(17, s);

//Test a uncompressed small string that has rich text formatting
s.setString(STR_16_BIT);
s.setOptionFlags((byte)0x9);
stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(21, stats.recordSize);
confirmSize(21, s);

//Test a compressed small string that has rich text and extended text
s.setString("Test");
s.setOptionFlags((byte)0xC);
s.setExtendedRst(new byte[]{(byte)0x1,(byte)0x2,(byte)0x3,(byte)0x4,(byte)0x5});
stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(26, stats.recordSize);
confirmSize(26, s);

//Test a uncompressed small string that has rich text and extended text
s.setString(STR_16_BIT);
s.setOptionFlags((byte)0xD);
stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(30, stats.recordSize);
confirmSize(30, s);
}

public void testPerfectStringSize() {
//Test a basic string
UnicodeString s = makeUnicodeString(SSTRecord.MAX_RECORD_SIZE-2-1);
UnicodeString.UnicodeRecordStats stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(SSTRecord.MAX_RECORD_SIZE, stats.recordSize);
UnicodeString s = makeUnicodeString(MAX_DATA_SIZE-2-1);
confirmSize(MAX_DATA_SIZE, s);

//Test an uncompressed string
//Note that we can only ever get to a maximim size of 8227 since an uncompressed
//string is writing double bytes.
s = makeUnicodeString((SSTRecord.MAX_RECORD_SIZE-2-1)/2);
s = makeUnicodeString((MAX_DATA_SIZE-2-1)/2, true);
s.setOptionFlags((byte)0x1);
stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(SSTRecord.MAX_RECORD_SIZE-1, stats.recordSize);
confirmSize(MAX_DATA_SIZE-1, s);
}

public void testPerfectRichStringSize() {
//Test a rich text string
UnicodeString s = makeUnicodeString(SSTRecord.MAX_RECORD_SIZE-2-1-8-2);
UnicodeString s = makeUnicodeString(MAX_DATA_SIZE-2-1-8-2);
s.addFormatRun(new UnicodeString.FormatRun((short)1,(short)0));
s.addFormatRun(new UnicodeString.FormatRun((short)2,(short)1));
UnicodeString.UnicodeRecordStats stats = new UnicodeString.UnicodeRecordStats();
s.setOptionFlags((byte)0x8);
s.getRecordSize(stats);
assertEquals(SSTRecord.MAX_RECORD_SIZE, stats.recordSize);
confirmSize(MAX_DATA_SIZE, s);

//Test an uncompressed rich text string
//Note that we can only ever get to a maximim size of 8227 since an uncompressed
//Note that we can only ever get to a maximum size of 8227 since an uncompressed
//string is writing double bytes.
s = makeUnicodeString((SSTRecord.MAX_RECORD_SIZE-2-1-8-2)/2);
s = makeUnicodeString((MAX_DATA_SIZE-2-1-8-2)/2, true);
s.addFormatRun(new UnicodeString.FormatRun((short)1,(short)0));
s.addFormatRun(new UnicodeString.FormatRun((short)2,(short)1));
s.setOptionFlags((byte)0x9);
stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(SSTRecord.MAX_RECORD_SIZE-1, stats.recordSize);
confirmSize(MAX_DATA_SIZE-1, s);
}

public void testContinuedStringSize() {
//Test a basic string
UnicodeString s = makeUnicodeString(SSTRecord.MAX_RECORD_SIZE-2-1+20);
UnicodeString.UnicodeRecordStats stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(SSTRecord.MAX_RECORD_SIZE+4+1+20, stats.recordSize);
UnicodeString s = makeUnicodeString(MAX_DATA_SIZE-2-1+20);
confirmSize(MAX_DATA_SIZE+4+1+20, s);
}

/** Tests that a string size calculation that fits neatly in two records, the second being a continue*/
public void testPerfectContinuedStringSize() {
//Test a basic string
int strSize = SSTRecord.MAX_RECORD_SIZE*2;
int strSize = MAX_DATA_SIZE*2;
//String overhead
strSize -= 3;
//Continue Record overhead
@@ -131,25 +139,29 @@ public final class TestUnicodeString extends TestCase {
//Continue Record additional byte overhead
strSize -= 1;
UnicodeString s = makeUnicodeString(strSize);
UnicodeString.UnicodeRecordStats stats = new UnicodeString.UnicodeRecordStats();
s.getRecordSize(stats);
assertEquals(SSTRecord.MAX_RECORD_SIZE*2, stats.recordSize);
confirmSize(MAX_DATA_SIZE*2, s);
}




private static UnicodeString makeUnicodeString( String s )
{
private static UnicodeString makeUnicodeString(String s) {
UnicodeString st = new UnicodeString(s);
st.setOptionFlags((byte)0);
return st;
}

private static UnicodeString makeUnicodeString( int numChars) {
private static UnicodeString makeUnicodeString(int numChars) {
return makeUnicodeString(numChars, false);
}
/**
* @param is16Bit if <code>true</code> the created string will have characters > 0x00FF
* @return a string of the specified number of characters
*/
private static UnicodeString makeUnicodeString(int numChars, boolean is16Bit) {
StringBuffer b = new StringBuffer(numChars);
int charBase = is16Bit ? 0x8A00 : 'A';
for (int i=0;i<numChars;i++) {
b.append(i%10);
char ch = (char) ((i%16)+charBase);
b.append(ch);
}
return makeUnicodeString(b.toString());
}

+ 1
- 2
src/testcases/org/apache/poi/hssf/record/constant/TestConstantValueParser.java Visa fil

@@ -22,7 +22,6 @@ import java.util.Arrays;
import junit.framework.TestCase;

import org.apache.poi.hssf.record.TestcaseRecordInputStream;
import org.apache.poi.hssf.record.UnicodeString;
import org.apache.poi.hssf.usermodel.HSSFErrorConstants;
import org.apache.poi.util.HexRead;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
@@ -36,7 +35,7 @@ public final class TestConstantValueParser extends TestCase {
Boolean.TRUE,
null,
new Double(1.1),
new UnicodeString("Sample text"),
"Sample text",
ErrorConstant.valueOf(HSSFErrorConstants.ERROR_DIV_0),
};
private static final byte[] SAMPLE_ENCODING = HexRead.readFromString(

+ 2
- 2
src/testcases/org/apache/poi/hssf/record/formula/TestArrayPtg.java Visa fil

@@ -66,10 +66,10 @@ public final class TestArrayPtg extends TestCase {
assertEquals(Boolean.TRUE, values[0][0]);
assertEquals(new UnicodeString("ABCD"), values[0][1]);
assertEquals("ABCD", values[0][1]);
assertEquals(new Double(0), values[1][0]);
assertEquals(Boolean.FALSE, values[1][1]);
assertEquals(new UnicodeString("FG"), values[1][2]);
assertEquals("FG", values[1][2]);
byte[] outBuf = new byte[ENCODED_CONSTANT_DATA.length];
ptg.writeTokenValueBytes(new LittleEndianByteArrayOutputStream(outBuf, 0));

+ 79
- 38
src/testcases/org/apache/poi/hssf/usermodel/TestHSSFCell.java Visa fil

@@ -28,8 +28,8 @@ import org.apache.poi.hssf.model.Sheet;
import org.apache.poi.hssf.util.HSSFColor;

/**
* Tests various functionity having to do with HSSFCell. For instance support for
* paticular datatypes, etc.
* Tests various functionality having to do with {@link HSSFCell}. For instance support for
* particular datatypes, etc.
* @author Andrew C. Oliver (andy at superlinksoftware dot com)
* @author Dan Sherman (dsherman at isisph.com)
* @author Alex Jacoby (ajacoby at gmail.com)
@@ -345,41 +345,82 @@ public final class TestHSSFCell extends TestCase {
}
}

/**
* Test to ensure we can only assign cell styles that belong
* to our workbook, and not those from other workbooks.
*/
public void testCellStyleWorkbookMatch() throws Exception {
HSSFWorkbook wbA = new HSSFWorkbook();
HSSFWorkbook wbB = new HSSFWorkbook();
HSSFCellStyle styA = wbA.createCellStyle();
HSSFCellStyle styB = wbB.createCellStyle();
styA.verifyBelongsToWorkbook(wbA);
styB.verifyBelongsToWorkbook(wbB);
try {
styA.verifyBelongsToWorkbook(wbB);
fail();
} catch(IllegalArgumentException e) {}
try {
styB.verifyBelongsToWorkbook(wbA);
fail();
} catch(IllegalArgumentException e) {}
HSSFCell cellA = wbA.createSheet().createRow(0).createCell(0);
HSSFCell cellB = wbB.createSheet().createRow(0).createCell(0);
cellA.setCellStyle(styA);
cellB.setCellStyle(styB);
try {
cellA.setCellStyle(styB);
fail();
} catch(IllegalArgumentException e) {}
try {
cellB.setCellStyle(styA);
fail();
} catch(IllegalArgumentException e) {}
}
/**
* Test to ensure we can only assign cell styles that belong
* to our workbook, and not those from other workbooks.
*/
public void testCellStyleWorkbookMatch() {
HSSFWorkbook wbA = new HSSFWorkbook();
HSSFWorkbook wbB = new HSSFWorkbook();

HSSFCellStyle styA = wbA.createCellStyle();
HSSFCellStyle styB = wbB.createCellStyle();

styA.verifyBelongsToWorkbook(wbA);
styB.verifyBelongsToWorkbook(wbB);
try {
styA.verifyBelongsToWorkbook(wbB);
fail();
} catch (IllegalArgumentException e) {}
try {
styB.verifyBelongsToWorkbook(wbA);
fail();
} catch (IllegalArgumentException e) {}

HSSFCell cellA = wbA.createSheet().createRow(0).createCell(0);
HSSFCell cellB = wbB.createSheet().createRow(0).createCell(0);

cellA.setCellStyle(styA);
cellB.setCellStyle(styB);
try {
cellA.setCellStyle(styB);
fail();
} catch (IllegalArgumentException e) {}
try {
cellB.setCellStyle(styA);
fail();
} catch (IllegalArgumentException e) {}
}

public void testChangeTypeStringToBool() {
HSSFCell cell = new HSSFWorkbook().createSheet("Sheet1").createRow(0).createCell(0);

cell.setCellValue(new HSSFRichTextString("TRUE"));
assertEquals(HSSFCell.CELL_TYPE_STRING, cell.getCellType());
try {
cell.setCellType(HSSFCell.CELL_TYPE_BOOLEAN);
} catch (ClassCastException e) {
throw new AssertionFailedError(
"Identified bug in conversion of cell from text to boolean");
}

assertEquals(HSSFCell.CELL_TYPE_BOOLEAN, cell.getCellType());
assertEquals(true, cell.getBooleanCellValue());
cell.setCellType(HSSFCell.CELL_TYPE_STRING);
assertEquals("TRUE", cell.getRichStringCellValue().getString());

// 'false' text to bool and back
cell.setCellValue(new HSSFRichTextString("FALSE"));
cell.setCellType(HSSFCell.CELL_TYPE_BOOLEAN);
assertEquals(HSSFCell.CELL_TYPE_BOOLEAN, cell.getCellType());
assertEquals(false, cell.getBooleanCellValue());
cell.setCellType(HSSFCell.CELL_TYPE_STRING);
assertEquals("FALSE", cell.getRichStringCellValue().getString());
}

public void testChangeTypeBoolToString() {
HSSFCell cell = new HSSFWorkbook().createSheet("Sheet1").createRow(0).createCell(0);
cell.setCellValue(true);
try {
cell.setCellType(HSSFCell.CELL_TYPE_STRING);
} catch (IllegalStateException e) {
if (e.getMessage().equals("Cannot get a text value from a boolean cell")) {
throw new AssertionFailedError(
"Identified bug in conversion of cell from boolean to text");
}
throw e;
}
assertEquals("TRUE", cell.getRichStringCellValue().getString());
}
}


Laddar…
Avbryt
Spara