import java.text.NumberFormat;
import java.util.ArrayList;
-import java.util.Hashtable;
+import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
private final HSSFListener _childListener;
private final HSSFDataFormatter _formatter;
private final NumberFormat _defaultFormat;
- private final Map<Integer, FormatRecord> _customFormatRecords = new Hashtable<Integer, FormatRecord>();
+ private final Map<Integer, FormatRecord> _customFormatRecords = new HashMap<Integer, FormatRecord>();
private final List<ExtendedFormatRecord> _xfRecords = new ArrayList<ExtendedFormatRecord>();
/**
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
* this holds the HSSFFont objects attached to this workbook.
* We only create these from the low level records as required.
*/
- private Hashtable<Short,HSSFFont> fonts;
+ private Map<Short,HSSFFont> fonts;
/**
* holds whether or not to preserve other nodes in the POIFS. Used
*/
@Override
public HSSFFont getFontAt(short idx) {
- if(fonts == null) fonts = new Hashtable<Short, HSSFFont>();
+ if(fonts == null) fonts = new HashMap<Short, HSSFFont>();
// So we don't confuse users, give them back
// the same object every time, but create
* and that's not something you should normally do
*/
protected void resetFontCache() {
- fonts = new Hashtable<Short, HSSFFont>();
+ fonts = new HashMap<Short, HSSFFont>();
}
/**
import java.lang.reflect.Field;
import java.util.Collections;
-import java.util.Hashtable;
+import java.util.HashMap;
import java.util.Map;
import org.apache.poi.ss.usermodel.Color;
return indexHash;
}
/**
- * This function returns all the Colours, stored in a Hashtable that
+ * This function returns all the Colours, stored in a Map that
* can be edited. No caching is performed. If you don't need to edit
* the table, then call {@link #getIndexHash()} which returns a
* statically cached immutable map of colours.
*/
- public final static Hashtable<Integer,HSSFColor> getMutableIndexHash() {
+ public final static Map<Integer,HSSFColor> getMutableIndexHash() {
return createColorsByIndexMap();
}
- private static Hashtable<Integer,HSSFColor> createColorsByIndexMap() {
+ private static Map<Integer,HSSFColor> createColorsByIndexMap() {
HSSFColor[] colors = getAllColors();
- Hashtable<Integer,HSSFColor> result = new Hashtable<Integer,HSSFColor>(colors.length * 3 / 2);
+ Map<Integer,HSSFColor> result = new HashMap<Integer,HSSFColor>(colors.length * 3 / 2);
for (int i = 0; i < colors.length; i++) {
HSSFColor color = colors[i];
* it takes to create it once per request but you will not hold onto it
* if you have none of those requests.
*
- * @return a hashtable containing all colors keyed by String gnumeric-like triplets
+ * @return a Map containing all colors keyed by String gnumeric-like triplets
*/
- public final static Hashtable<String,HSSFColor> getTripletHash()
+ public final static Map<String,HSSFColor> getTripletHash()
{
return createColorsByHexStringMap();
}
- private static Hashtable<String,HSSFColor> createColorsByHexStringMap() {
+ private static Map<String,HSSFColor> createColorsByHexStringMap() {
HSSFColor[] colors = getAllColors();
- Hashtable<String,HSSFColor> result = new Hashtable<String,HSSFColor>(colors.length * 3 / 2);
+ Map<String,HSSFColor> result = new HashMap<String,HSSFColor>(colors.length * 3 / 2);
for (int i = 0; i < colors.length; i++) {
HSSFColor color = colors[i];
/**
* Special Default/Normal/Automatic color.
- * <p><i>Note:</i> This class is NOT in the default HashTables returned by HSSFColor.
+ * <p><i>Note:</i> This class is NOT in the default Map returned by HSSFColor.
* The index is a special case which is interpreted in the various setXXXColor calls.
*
* @author Jason
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
-import java.util.Hashtable;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;
* Initialize the package instance.
*/
private void init() {
- this.partMarshallers = new Hashtable<ContentType, PartMarshaller>(5);
- this.partUnmarshallers = new Hashtable<ContentType, PartUnmarshaller>(2);
+ this.partMarshallers = new HashMap<ContentType, PartMarshaller>(5);
+ this.partUnmarshallers = new HashMap<ContentType, PartUnmarshaller>(2);
try {
// Add 'default' unmarshaller
package org.apache.poi.openxml4j.opc.internal;
-import java.util.Hashtable;
+import java.util.Collections;
+import java.util.HashMap;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Type in Type/Subtype.
*/
- private String type;
+ private final String type;
/**
* Subtype
*/
- private String subType;
+ private final String subType;
/**
* Parameters
*/
- private Hashtable<String, String> parameters;
+ private final Map<String, String> parameters;
/**
* Media type compiled pattern, without parameters
public ContentType(String contentType) throws InvalidFormatException {
Matcher mMediaType = patternTypeSubType.matcher(contentType);
if (!mMediaType.matches()) {
- // How about with parameters?
- mMediaType = patternTypeSubTypeParams.matcher(contentType);
+ // How about with parameters?
+ mMediaType = patternTypeSubTypeParams.matcher(contentType);
}
- if (!mMediaType.matches()) {
+ if (!mMediaType.matches()) {
throw new InvalidFormatException(
"The specified content type '"
+ contentType
+ "' is not compliant with RFC 2616: malformed content type.");
- }
+ }
// Type/subtype
if (mMediaType.groupCount() >= 2) {
this.subType = mMediaType.group(2);
// Parameters
- this.parameters = new Hashtable<String, String>(1);
+ this.parameters = new HashMap<String, String>();
// Java RegExps are unhelpful, and won't do multiple group captures
// See http://docs.oracle.com/javase/6/docs/api/java/util/regex/Pattern.html#cg
if (mMediaType.groupCount() >= 5) {
- Matcher mParams = patternParams.matcher(contentType.substring(mMediaType.end(2)));
- while (mParams.find()) {
- this.parameters.put(mParams.group(1), mParams.group(2));
- }
+ Matcher mParams = patternParams.matcher(contentType.substring(mMediaType.end(2)));
+ while (mParams.find()) {
+ this.parameters.put(mParams.group(1), mParams.group(2));
+ }
}
+ } else {
+ // missing media type and subtype
+ this.type = "";
+ this.subType = "";
+ this.parameters = Collections.emptyMap();
}
}
retVal.append(this.getSubType());
if (withParameters) {
- for (Map.Entry<String, String> me : parameters.entrySet()) {
+ for (Entry<String, String> me : parameters.entrySet()) {
retVal.append(";");
retVal.append(me.getKey());
retVal.append("=");
// Check the types on everything
for (PackagePart part : p.getParts()) {
+ final String contentType = part.getContentType();
+ final ContentType details = part.getContentTypeDetails();
+ final int length = details.getParameterKeys().length;
+ final boolean hasParameters = details.hasParameters();
+
// _rels type doesn't have any params
if (part.isRelationshipPart()) {
- assertEquals(ContentTypes.RELATIONSHIPS_PART, part.getContentType());
- assertEquals(ContentTypes.RELATIONSHIPS_PART, part.getContentTypeDetails().toString());
- assertEquals(false, part.getContentTypeDetails().hasParameters());
- assertEquals(0, part.getContentTypeDetails().getParameterKeys().length);
+ assertEquals(ContentTypes.RELATIONSHIPS_PART, contentType);
+ assertEquals(ContentTypes.RELATIONSHIPS_PART, details.toString());
+ assertEquals(false, hasParameters);
+ assertEquals(0, length);
}
// Core type doesn't have any params
else if (part.getPartName().toString().equals("/docProps/core.xml")) {
- assertEquals(ContentTypes.CORE_PROPERTIES_PART, part.getContentType());
- assertEquals(ContentTypes.CORE_PROPERTIES_PART, part.getContentTypeDetails().toString());
- assertEquals(false, part.getContentTypeDetails().hasParameters());
- assertEquals(0, part.getContentTypeDetails().getParameterKeys().length);
+ assertEquals(ContentTypes.CORE_PROPERTIES_PART, contentType);
+ assertEquals(ContentTypes.CORE_PROPERTIES_PART, details.toString());
+ assertEquals(false, hasParameters);
+ assertEquals(0, length);
}
// Global Crs types do have params
else if (part.getPartName().toString().equals("/global1dCrs.xml")) {
- assertEquals(typeResqml, part.getContentType().substring(0, typeResqml.length()));
- assertEquals(typeResqml, part.getContentTypeDetails().toString(false));
- assertEquals(true, part.getContentTypeDetails().hasParameters());
- assertEquals(typeResqml+";version=2.0;type=obj_global1dCrs", part.getContentTypeDetails().toString());
- assertEquals(2, part.getContentTypeDetails().getParameterKeys().length);
- assertEquals("2.0", part.getContentTypeDetails().getParameter("version"));
- assertEquals("obj_global1dCrs", part.getContentTypeDetails().getParameter("type"));
+ assertTrue(part.getContentType().startsWith(typeResqml));
+ assertEquals(typeResqml, details.toString(false));
+ assertEquals(true, hasParameters);
+ assertContains("version=2.0", details.toString());
+ assertContains("type=obj_global1dCrs", details.toString());
+ assertEquals(2, length);
+ assertEquals("2.0", details.getParameter("version"));
+ assertEquals("obj_global1dCrs", details.getParameter("type"));
}
else if (part.getPartName().toString().equals("/global2dCrs.xml")) {
- assertEquals(typeResqml, part.getContentType().substring(0, typeResqml.length()));
- assertEquals(typeResqml, part.getContentTypeDetails().toString(false));
- assertEquals(true, part.getContentTypeDetails().hasParameters());
- assertEquals(typeResqml+";version=2.0;type=obj_global2dCrs", part.getContentTypeDetails().toString());
- assertEquals(2, part.getContentTypeDetails().getParameterKeys().length);
- assertEquals("2.0", part.getContentTypeDetails().getParameter("version"));
- assertEquals("obj_global2dCrs", part.getContentTypeDetails().getParameter("type"));
+ assertTrue(part.getContentType().startsWith(typeResqml));
+ assertEquals(typeResqml, details.toString(false));
+ assertEquals(true, hasParameters);
+ assertContains("version=2.0", details.toString());
+ assertContains("type=obj_global2dCrs", details.toString());
+ assertEquals(2, length);
+ assertEquals("2.0", details.getParameter("version"));
+ assertEquals("obj_global2dCrs", details.getParameter("type"));
}
// Other thingy
else if (part.getPartName().toString().equals("/myTestingGuid.xml")) {
- assertEquals(typeResqml, part.getContentType().substring(0, typeResqml.length()));
- assertEquals(typeResqml, part.getContentTypeDetails().toString(false));
- assertEquals(true, part.getContentTypeDetails().hasParameters());
- assertEquals(typeResqml+";version=2.0;type=obj_tectonicBoundaryFeature", part.getContentTypeDetails().toString());
- assertEquals(2, part.getContentTypeDetails().getParameterKeys().length);
- assertEquals("2.0", part.getContentTypeDetails().getParameter("version"));
- assertEquals("obj_tectonicBoundaryFeature", part.getContentTypeDetails().getParameter("type"));
+ assertTrue(part.getContentType().startsWith(typeResqml));
+ assertEquals(typeResqml, details.toString(false));
+ assertEquals(true, hasParameters);
+ assertContains("version=2.0", details.toString());
+ assertContains("type=obj_tectonicBoundaryFeature", details.toString());
+ assertEquals(2, length);
+ assertEquals("2.0", details.getParameter("version"));
+ assertEquals("obj_tectonicBoundaryFeature", details.getParameter("type"));
}
// That should be it!
else {
fail("Unexpected part " + part);
}
}
- }
+ }
+
+ private static void assertContains(String needle, String haystack) {
+ assertTrue(haystack.contains(needle));
+ }
}
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
-import java.util.Hashtable;
+import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
* Key is a Chunk's type, value is an array of its CommandDefinitions
*/
private final Map<Integer, CommandDefinition[]> chunkCommandDefinitions =
- new Hashtable<Integer, CommandDefinition[]>();
+ new HashMap<Integer, CommandDefinition[]>();
/**
* What the name is of the chunk table definitions file?
* This file comes from the scratchpad resources directory.
CommandDefinition[] defs = defsL.toArray(new CommandDefinition[defsL.size()]);
- // Add to the hashtable
+ // Add to the map
chunkCommandDefinitions.put(Integer.valueOf(chunkType), defs);
}
} finally {
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.util.Hashtable;
+import java.util.Map;
import org.apache.poi.poifs.crypt.CipherAlgorithm;
import org.apache.poi.poifs.crypt.EncryptionInfo;
out.write(data, 0, bos.getWriteIndex());
}
- public void updateOtherRecordReferences(Hashtable<Integer,Integer> oldToNewReferencesLookup) {
+ public void updateOtherRecordReferences(Map<Integer,Integer> oldToNewReferencesLookup) {
}
}
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.util.Hashtable;
+import java.util.Map;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
myLastOnDiskOffset = offset;
}
- public void updateOtherRecordReferences(Hashtable<Integer,Integer> oldToNewReferencesLookup) {
+ public void updateOtherRecordReferences(Map<Integer,Integer> oldToNewReferencesLookup) {
return;
}
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.util.Hashtable;
+import java.util.Collections;
+import java.util.HashMap;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.poi.hslf.exceptions.CorruptPowerPointFileException;
* You always need to check the most recent PersistPtrHolder
* that knows about a given slide to find the right location
*/
- private Hashtable<Integer,Integer> _slideLocations;
+ private Map<Integer,Integer> _slideLocations;
private static final BitField persistIdFld = new BitField(0X000FFFFF);
private static final BitField cntPersistFld = new BitField(0XFFF00000);
/**
* Get the list of slides that this PersistPtrHolder knows about.
- * (They will be the keys in the hashtable for looking up the positions
+ * (They will be the keys in the map for looking up the positions
* of these slides)
*/
public int[] getKnownSlideIDs() {
* Get the lookup from slide numbers to byte offsets, for the slides
* known about by this PersistPtrHolder.
*/
- public Hashtable<Integer,Integer> getSlideLocationsLookup() {
- return _slideLocations;
+ public Map<Integer,Integer> getSlideLocationsLookup() {
+ return Collections.unmodifiableMap(_slideLocations);
}
/**
// base number for these entries
// count * 32 bit offsets
// Repeat as many times as you have data
- _slideLocations = new Hashtable<Integer,Integer>();
+ _slideLocations = new HashMap<Integer,Integer>();
_ptrData = new byte[len-8];
System.arraycopy(source,start+8,_ptrData,0,_ptrData.length);
* At write-out time, update the references to the sheets to their
* new positions
*/
- public void updateOtherRecordReferences(Hashtable<Integer,Integer> oldToNewReferencesLookup) {
+ public void updateOtherRecordReferences(Map<Integer,Integer> oldToNewReferencesLookup) {
// Loop over all the slides we know about
// Find where they used to live, and where they now live
- for (Map.Entry<Integer,Integer> me : _slideLocations.entrySet()) {
+ for (Entry<Integer,Integer> me : _slideLocations.entrySet()) {
Integer oldPos = me.getValue();
Integer newPos = oldToNewReferencesLookup.get(oldPos);
byte intbuf[] = new byte[4];
int lastPersistEntry = -1;
int lastSlideId = -1;
- for (Map.Entry<Integer,Integer> me : orderedSlideLocations.entrySet()) {
+ for (Entry<Integer,Integer> me : orderedSlideLocations.entrySet()) {
int nextSlideId = me.getKey();
int offset = me.getValue();
try {
==================================================================== */
package org.apache.poi.hslf.record;
-import java.util.Hashtable;
+import java.util.Map;
/**
* Records which either care about where they are on disk, or have other
* Offer the record the list of records that have changed their
* location as part of the writeout.
*/
- public void updateOtherRecordReferences(Hashtable<Integer,Integer> oldToNewReferencesLookup);
+ public void updateOtherRecordReferences(Map<Integer,Integer> oldToNewReferencesLookup);
}
==================================================================== */
package org.apache.poi.hslf.record;
-import java.util.Hashtable;
+import java.util.Map;
/**
* A special (and dangerous) kind of Record Atom that cares about where
* Allows records to update their internal pointers to other records
* locations
*/
- public abstract void updateOtherRecordReferences(Hashtable<Integer,Integer> oldToNewReferencesLookup);
+ public abstract void updateOtherRecordReferences(Map<Integer,Integer> oldToNewReferencesLookup);
}
==================================================================== */
package org.apache.poi.hslf.record;
-import java.util.Hashtable;
+import java.util.Map;
/**
* A special (and dangerous) kind of Record Container, for which other
* Since we're a container, we don't mind if other records move about.
* If we're told they have, just return straight off.
*/
- public void updateOtherRecordReferences(Hashtable<Integer,Integer> oldToNewReferencesLookup) {
+ public void updateOtherRecordReferences(Map<Integer,Integer> oldToNewReferencesLookup) {
return;
}
}
import java.io.IOException;
import java.io.OutputStream;
-import java.util.Hashtable;
+import java.util.Map;
/**
* A UserEdit Atom (type 4085). Holds information which bits of the file
* At write-out time, update the references to PersistPtrs and
* other UserEditAtoms to point to their new positions
*/
- public void updateOtherRecordReferences(Hashtable<Integer,Integer> oldToNewReferencesLookup) {
+ public void updateOtherRecordReferences(Map<Integer,Integer> oldToNewReferencesLookup) {
// Look up the new positions of our preceding UserEditAtomOffset
if(lastUserEditAtomOffset != 0) {
Integer newLocation = oldToNewReferencesLookup.get(Integer.valueOf(lastUserEditAtomOffset));
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
-import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
// For position dependent records, hold where they were and now are
// As we go along, update, and hand over, to any Position Dependent
// records we happen across
- Hashtable<Integer,Integer> oldToNewPositions = new Hashtable<Integer,Integer>();
+ Map<Integer,Integer> oldToNewPositions = new HashMap<Integer,Integer>();
// First pass - figure out where all the position dependent
// records are going to end up, in the new scheme
// Find the location of the PersistPtrIncrementalBlocks and
// UserEditAtoms
Record[] r = wss.getRecords();
- Map<Integer,Record> pp = new Hashtable<Integer,Record>();
- Map<Integer,Object> ue = new Hashtable<Integer,Object>();
+ Map<Integer,Record> pp = new HashMap<Integer,Record>();
+ Map<Integer,Object> ue = new HashMap<Integer,Object>();
ue.put(Integer.valueOf(0),Integer.valueOf(0)); // Will show 0 if first
int pos = 0;
int lastUEPos = -1;
package org.apache.poi.hssf.util;
-import java.util.Hashtable;
+import java.util.Map;
import junit.framework.TestCase;
/**
}
public void testTrippletHash() {
- Hashtable tripplets = HSSFColor.getTripletHash();
+ Map<String, HSSFColor> tripplets = HSSFColor.getTripletHash();
assertEquals(
HSSFColor.MAROON.class,