import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
-
import org.apache.commons.lang.builder.CompareToBuilder;
+
+
/**
* Access table index
* @author Tim McCune
private static final int NEW_ENTRY_COLUMN_INDEX = -1;
private static final byte REVERSE_ORDER_FLAG = (byte)0x01;
+
+ private static final byte INDEX_NODE_PAGE_TYPE = (byte)0x03;
+ private static final byte INDEX_LEAF_PAGE_TYPE = (byte)0x04;
static final Comparator<byte[]> BYTE_CODE_COMPARATOR =
new Comparator<byte[]>() {
private String _name;
/** is this index a primary key */
private boolean _primaryKey;
-
+ /**
+ * FIXME, for now, we can't write multi-page indexes or indexes using the
+ * funky primary key compression scheme
+ */
+ boolean _readOnly;
public Index(int parentPageNumber, PageChannel channel, JetFormat format) {
_parentPageNumber = parentPageNumber;
public Collection<Column> getColumns() {
return Collections.unmodifiableCollection(_columns.keySet());
}
-
+
public void update() throws IOException {
+ if(_readOnly) {
+ throw new UnsupportedOperationException(
+ "FIXME cannot write indexes of this type yet");
+ }
_pageChannel.writePage(write(), _pageNumber);
}
/**
* Write this index out to a buffer
*/
- public ByteBuffer write() throws IOException {
+ private ByteBuffer write() throws IOException {
ByteBuffer buffer = _pageChannel.createPageBuffer();
buffer.put((byte) 0x04); //Page type
buffer.put((byte) 0x01); //Unknown
}
/**
- * Read this index in from a buffer
- * @param buffer Buffer to read from
+ * Read this index in from a table definition buffer
+ * @param tableBuffer table definition buffer from which to read the initial index info
* @param availableColumns Columns that this index may use
*/
- public void read(ByteBuffer buffer, List<Column> availableColumns)
+ public void read(ByteBuffer tableBuffer, List<Column> availableColumns)
throws IOException
{
for (int i = 0; i < MAX_COLUMNS; i++) {
- short columnNumber = buffer.getShort();
- Byte flags = new Byte(buffer.get());
+ short columnNumber = tableBuffer.getShort();
+ Byte flags = Byte.valueOf(tableBuffer.get());
if (columnNumber != COLUMN_UNUSED) {
_columns.put(availableColumns.get(columnNumber), flags);
}
}
- buffer.getInt(); //Forward past Unknown
- _pageNumber = buffer.getInt();
- buffer.position(buffer.position() + 10); //Forward past other stuff
+ tableBuffer.getInt(); //Forward past Unknown
+ _pageNumber = tableBuffer.getInt();
+ tableBuffer.position(tableBuffer.position() + 10); //Forward past other stuff
ByteBuffer indexPage = _pageChannel.createPageBuffer();
- _pageChannel.readPage(indexPage, _pageNumber);
- indexPage.position(_format.OFFSET_INDEX_ENTRY_MASK);
- byte[] entryMask = new byte[_format.SIZE_INDEX_ENTRY_MASK];
- indexPage.get(entryMask);
+
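+ // an index is stored as a tree of pages: node pages (type 0x03) hold
+ // entries which point down to sub-pages, while leaf pages (type 0x04)
+ // hold the actual row entries and are chained via a "next page" pointer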
+ // find first leaf page
+ int leafPageNumber = _pageNumber;
+ while(true) {
+ _pageChannel.readPage(indexPage, leafPageNumber);
+
+ if(indexPage.get(0) == INDEX_NODE_PAGE_TYPE) {
+ // FIXME we can't modify this index at this point in time
+ _readOnly = true;
+
+ // found another node page
+ leafPageNumber = readNodePage(indexPage);
+ } else {
+ // found first leaf
+ indexPage.rewind();
+ break;
+ }
+ }
+
+ // read all leaf pages
+ while(true) {
+
+ leafPageNumber = readLeafPage(indexPage);
+ if(leafPageNumber != 0) {
+ // FIXME we can't modify this index at this point in time
+ _readOnly = true;
+
+ // found another one
+ _pageChannel.readPage(indexPage, leafPageNumber);
+
+ } else {
+ // all done
+ break;
+ }
+ }
+
+ }
+
+ /**
+ * Reads the first entry off of an index node page and returns the next page
+ * number.
+ */
+ private int readNodePage(ByteBuffer nodePage)
+ throws IOException
+ {
+ if(nodePage.get(0) != INDEX_NODE_PAGE_TYPE) {
+ throw new IOException("expected index node page, found " +
+ nodePage.get(0));
+ }
+
+ List<NodeEntry> nodeEntries = new ArrayList<NodeEntry>();
+ readIndexPage(nodePage, false, null, nodeEntries);
+
+ // grab the first entry
+ // FIXME, need to parse all...?
+ return nodeEntries.get(0).getSubPageNumber();
+ }
+
+ /**
+ * Reads an index leaf page.
+ * @return the next leaf page number, 0 if none
+ */
+ private int readLeafPage(ByteBuffer leafPage)
+ throws IOException
+ {
+ if(leafPage.get(0) != INDEX_LEAF_PAGE_TYPE) {
+ throw new IOException("expected index leaf page, found " +
+ leafPage.get(0));
+ }
+
+ // note, "header" data is in LITTLE_ENDIAN format, entry data is in
+ // BIG_ENDIAN format
+
+ int nextLeafPage = leafPage.getInt(_format.OFFSET_NEXT_INDEX_LEAF_PAGE);
+ readIndexPage(leafPage, true, _entries, null);
+
+ return nextLeafPage;
+ }
+
+ /**
+ * Reads an index page, populating the correct collection based on the page
+ * type (node or leaf).
+ */
+ private void readIndexPage(ByteBuffer indexPage, boolean isLeaf,
+ Collection<Entry> entries,
+ Collection<NodeEntry> nodeEntries)
+ throws IOException
+ {
+ // note, "header" data is in LITTLE_ENDIAN format, entry data is in
+ // BIG_ENDIAN format
+ int numCompressedBytes = indexPage.get(
+ _format.OFFSET_INDEX_COMPRESSED_BYTE_COUNT);
+ int entryMaskLength = _format.SIZE_INDEX_ENTRY_MASK;
+ int entryMaskPos = _format.OFFSET_INDEX_ENTRY_MASK;
+ int entryPos = entryMaskPos + entryMaskLength;
int lastStart = 0;
- int nextEntryIndex = 0;
- for (int i = 0; i < entryMask.length; i++) {
+ byte[] valuePrefix = null;
+ boolean firstEntry = true;
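+ // the entry mask is a bitmap over the entry data area; each set bit
+ // marks the offset at which one entry ends, so the distance between
+ // consecutive set bits is that entry's length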
+ for (int i = 0; i < entryMaskLength; i++) {
+ byte entryMask = indexPage.get(entryMaskPos + i);
for (int j = 0; j < 8; j++) {
- if ((entryMask[i] & (1 << j)) != 0) {
+ if ((entryMask & (1 << j)) != 0) {
int length = i * 8 + j - lastStart;
- Entry e = new Entry(indexPage, nextEntryIndex++);
- _entries.add(e);
- lastStart += length;
+ indexPage.position(entryPos + lastStart);
+ if(isLeaf) {
+ entries.add(new Entry(indexPage, length, valuePrefix));
+ } else {
+ nodeEntries.add(new NodeEntry(indexPage, length, valuePrefix));
+ }
+
+ // read any shared "compressed" bytes
+ if(firstEntry) {
+ firstEntry = false;
+ if(numCompressedBytes > 0) {
+ // FIXME we can't modify this index at this point in time
+ _readOnly = true;
+
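+ // the first entry on the page stores its full value; subsequent
+ // entries omit the first numCompressedBytes bytes and share them
+ // from the start of the first entry (simple prefix compression)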
+ valuePrefix = new byte[numCompressedBytes];
+ indexPage.position(entryPos + lastStart);
+ indexPage.get(valuePrefix);
+ }
+ }
+
+ lastStart += length;
}
}
}
}
+
/**
* Add a row to this index
* @param row Row to add
{
_entries.add(new Entry(row, pageNumber, rowNumber));
}
-
+
+ @Override
public String toString() {
StringBuilder rtn = new StringBuilder();
rtn.append("\tName: " + _name);
/**
- * A single entry in an index (points to a single row)
+ * A single leaf entry in an index (points to a single row)
*/
private class Entry implements Comparable<Entry> {
/**
* Read an existing entry in from a buffer
*/
- public Entry(ByteBuffer buffer, int nextEntryIndex) throws IOException {
+ public Entry(ByteBuffer buffer, int length, byte[] valuePrefix)
+ throws IOException
+ {
for(Map.Entry<Column, Byte> entry : _columns.entrySet()) {
Column col = entry.getKey();
Byte flags = entry.getValue();
_entryColumns.add(newEntryColumn(col)
- .initFromBuffer(buffer, nextEntryIndex, flags));
+ .initFromBuffer(buffer, flags, valuePrefix));
}
- // 3-byte int in big endian order! Gotta love those kooky MS
- // programmers. :)
_page = ByteUtil.get3ByteInt(buffer, ByteOrder.BIG_ENDIAN);
_row = buffer.get();
}
buffer.put(_row);
}
+ @Override
public String toString() {
return ("Page = " + _page + ", Row = " + _row + ", Columns = " + _entryColumns + "\n");
}
* Initialize from a buffer
*/
protected abstract EntryColumn initFromBuffer(ByteBuffer buffer,
- int entryIndex,
- byte flags)
+ byte flags,
+ byte[] valuePrefix)
throws IOException;
protected abstract boolean isNullValue();
*/
@Override
protected EntryColumn initFromBuffer(ByteBuffer buffer,
- int entryIndex,
- byte flags)
+ byte flags,
+ byte[] valuePrefix)
throws IOException
{
- byte flag = buffer.get();
+ byte flag = ((valuePrefix == null) ? buffer.get() : valuePrefix[0]);
// FIXME, reverse is 0x80, reverse null is 0xFF
if (flag != (byte) 0) {
- byte[] data = new byte[_column.getType().getFixedSize()];
- buffer.get(data);
+ byte[] data = new byte[_column.getType().getFixedSize()];
+ int numPrefixBytes = ((valuePrefix == null) ? 0 :
+ (valuePrefix.length - 1));
+ int dataOffset = 0;
+ if(numPrefixBytes > 0) {
+ System.arraycopy(valuePrefix, 1, data, 0, numPrefixBytes);
+ dataOffset += numPrefixBytes;
+ }
+ buffer.get(data, dataOffset, (data.length - dataOffset));
_value = (Comparable) _column.read(data, ByteOrder.BIG_ENDIAN);
//ints and shorts are stored in index as value + 2147483648
(long) Integer.MAX_VALUE + 1L));
}
}
-
+
return this;
}
*/
@Override
protected EntryColumn initFromBuffer(ByteBuffer buffer,
- int entryIndex,
- byte flags)
+ byte flags,
+ byte[] valuePrefix)
throws IOException
{
- byte flag = buffer.get();
+ byte flag = ((valuePrefix == null) ? buffer.get() : valuePrefix[0]);
// FIXME, reverse is 0x80, reverse null is 0xFF
// end flag is FE, post extra bytes is FF 00
// extra bytes are inverted, so are normal bytes
++endPos;
}
+ // FIXME, prefix could probably include extraBytes...
+
// read index bytes
- _valueBytes = new byte[endPos - buffer.position()];
- buffer.get(_valueBytes);
+ int numPrefixBytes = ((valuePrefix == null) ? 0 :
+ (valuePrefix.length - 1));
+ int dataOffset = 0;
+ _valueBytes = new byte[(endPos - buffer.position()) +
+ numPrefixBytes];
+ if(numPrefixBytes > 0) {
+ System.arraycopy(valuePrefix, 1, _valueBytes, 0, numPrefixBytes);
+ dataOffset += numPrefixBytes;
+ }
+ buffer.get(_valueBytes, dataOffset,
+ (_valueBytes.length - dataOffset));
// read end codes byte
buffer.get();
}
}
-
+
+ /**
+ * A single node entry in an index (points to a sub-page in the index)
+ */
+ private class NodeEntry extends Entry {
+
+ /** index page number of the page to which this node entry refers */
+ private int _subPageNumber;
+
+
+ /**
+ * Read an existing node entry in from a buffer
+ */
+ public NodeEntry(ByteBuffer buffer, int length, byte[] valuePrefix)
+ throws IOException
+ {
+ super(buffer, length, valuePrefix);
+
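+ // the sub-page pointer follows the entry's normal column/page/row data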
+ _subPageNumber = ByteUtil.getInt(buffer, ByteOrder.BIG_ENDIAN);
+ }
+
+ public int getSubPageNumber() {
+ return _subPageNumber;
+ }
+
+ @Override
+ public String toString() {
+ return ("Page = " + getPage() + ", Row = " + getRow() +
+ ", SubPage = " + _subPageNumber +
+ ", Columns = " + getEntryColumns() + "\n");
+ }
+
+ }
+
}
package com.healthmarketscience.jackcess;
import java.io.File;
+import java.io.FileInputStream;
import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.io.PrintWriter;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
assertTrue(!bogusFile.exists());
}
- public void testPrimaryKey() throws Exception {
- Table table = open().getTable("Table1");
- Map<String, Boolean> foundPKs = new HashMap<String, Boolean>();
- for(Index index : table.getIndexes()) {
- foundPKs.put(index.getColumns().iterator().next().getName(),
- index.isPrimaryKey());
- }
- Map<String, Boolean> expectedPKs = new HashMap<String, Boolean>();
- expectedPKs.put("A", Boolean.TRUE);
- expectedPKs.put("B", Boolean.FALSE);
- assertEquals(expectedPKs, foundPKs);
- }
-
public void testReadWithDeletedCols() throws Exception {
Table table = Database.open(new File("test/data/delColTest.mdb")).getTable("Table1");
}
}
- public void testIndexSlots() throws Exception
- {
- Database mdb = Database.open(new File("test/data/indexTest.mdb"));
-
- Table table = mdb.getTable("Table1");
- assertEquals(4, table.getIndexes().size());
- assertEquals(4, table.getIndexSlotCount());
-
- table = mdb.getTable("Table2");
- assertEquals(2, table.getIndexes().size());
- assertEquals(3, table.getIndexSlotCount());
-
- table = mdb.getTable("Table3");
- assertEquals(2, table.getIndexes().size());
- assertEquals(3, table.getIndexSlotCount());
- }
-
public void testMultiPageTableDef() throws Exception
{
List<Column> columns = open().getTable("Table2").getColumns();
writer.println(row);
}
}
+
+ static void copyFile(File srcFile, File dstFile)
+ throws IOException
+ {
+ // FIXME should really be using commons io FileUtils here, but don't want
+ // to add dep for one simple test method
+ byte[] buf = new byte[1024];
+ OutputStream ostream = new FileOutputStream(dstFile);
+ InputStream istream = new FileInputStream(srcFile);
+ try {
+ int numBytes = 0;
+ while((numBytes = istream.read(buf)) >= 0) {
+ ostream.write(buf, 0, numBytes);
+ }
+ } finally {
+ istream.close();
+ ostream.close();
+ }
+ }
}