Rewrite HierarchyMapper to consistently handle changes
author: Teemu Suo-Anttila <tsuoanttila@users.noreply.github.com>
Thu, 15 Jun 2017 12:44:56 +0000 (15:44 +0300)
committer: Ilia Motornyi <elmot@vaadin.com>
Thu, 15 Jun 2017 12:44:56 +0000 (15:44 +0300)
Fixes #9449
Fixes #9490
Fixes #9448
Fixes #9465

client/src/main/java/com/vaadin/client/ui/treegrid/TreeGridConnector.java
server/src/main/java/com/vaadin/data/provider/DataCommunicator.java
server/src/main/java/com/vaadin/data/provider/HierarchicalDataCommunicator.java
server/src/main/java/com/vaadin/data/provider/HierarchyMapper.java
server/src/main/java/com/vaadin/ui/TreeGrid.java
server/src/test/java/com/vaadin/data/provider/HierarchyMapperTest.java [deleted file]
server/src/test/java/com/vaadin/data/provider/hierarchical/HierarchyMapperWithDataTest.java [new file with mode: 0644]
server/src/test/java/com/vaadin/data/provider/hierarchical/Node.java [new file with mode: 0644]
shared/src/main/java/com/vaadin/shared/ui/treegrid/FocusParentRpc.java
uitest/src/main/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchy.java
uitest/src/test/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchyTest.java

index e92747dde9d621b9786ae31334710473c70eddfa..48ba0562fd8e46d8d57ca03af721e959e0801359 100644 (file)
@@ -376,7 +376,7 @@ public class TreeGridConnector extends GridConnector {
                             // navigate up
                             int columnIndex = cell.getColumnIndex();
                             getRpcProxy(FocusParentRpc.class).focusParent(
-                                    cell.getRowIndex(), columnIndex);
+                                    getRowKey(cell.getRow()), columnIndex);
                         } else if (isCollapseAllowed(rowDescription)) {
                             setCollapsed(cell.getRowIndex(), true);
                         }
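
On the client, focusing the parent row is now keyed by the row's communication key instead of its index, which stays valid while the hierarchy changes underneath. A minimal sketch of how the updated FocusParentRpc presumably looks after this change; the signature is an assumption inferred from the call above, not copied from the commit:

    package com.vaadin.shared.ui.treegrid;

    import com.vaadin.shared.communication.ServerRpc;

    // Hypothetical sketch only; method and parameter names are assumptions
    // inferred from the client-side call above.
    public interface FocusParentRpc extends ServerRpc {

        /**
         * Focuses the parent of the row identified by the given key.
         *
         * @param rowKey
         *            the communication key of the row, stable across
         *            hierarchy changes, unlike a plain row index
         * @param columnIndex
         *            the index of the currently focused column
         */
        void focusParent(String rowKey, int columnIndex);
    }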
index 7427e7a8c573c9c74c202ddfe35bd1d7fdd371ae..230eab8d403cd331bea20b8625882a11c0829ffb 100644 (file)
@@ -25,7 +25,6 @@ import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
@@ -193,12 +192,13 @@ public class DataCommunicator<T> extends AbstractExtension {
     private final Collection<DataGenerator<T>> generators = new LinkedHashSet<>();
     private final ActiveDataHandler handler = new ActiveDataHandler();
 
-    /** Empty default data provider */
+    /** Empty default data provider. */
     protected DataProvider<T, ?> dataProvider = new CallbackDataProvider<>(
             q -> Stream.empty(), q -> 0);
     private final DataKeyMapper<T> keyMapper;
 
-    protected boolean reset = false;
+    /** Boolean for pending hard reset. */
+    protected boolean reset = true;
     private final Set<T> updatedData = new HashSet<>();
     private int minPushSize = 40;
     private Range pushRows = Range.withLength(0, minPushSize);
@@ -323,9 +323,7 @@ public class DataCommunicator<T> extends AbstractExtension {
         }
 
         if (initial || reset) {
-            @SuppressWarnings({ "rawtypes", "unchecked" })
-            int dataProviderSize = getDataProvider().size(new Query(filter));
-            rpc.reset(dataProviderSize);
+            rpc.reset(getDataProviderSize());
         }
 
         Range requestedRows = getPushRows();
@@ -334,11 +332,7 @@ public class DataCommunicator<T> extends AbstractExtension {
             int offset = requestedRows.getStart();
             int limit = requestedRows.length();
 
-            @SuppressWarnings({ "rawtypes", "unchecked" })
-            List<T> rowsToPush = (List<T>) getDataProvider()
-                    .fetch(new Query(offset, limit, backEndSorting,
-                            inMemorySorting, filter))
-                    .collect(Collectors.toList());
+            List<T> rowsToPush = fetchItemsWithRange(offset, limit);
 
             if (!initial && !reset && rowsToPush.size() == 0) {
                 triggerReset = true;
@@ -361,6 +355,13 @@ public class DataCommunicator<T> extends AbstractExtension {
         updatedData.clear();
     }
 
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    protected List<T> fetchItemsWithRange(int offset, int limit) {
+        return (List<T>) getDataProvider().fetch(new Query(offset, limit,
+                backEndSorting, inMemorySorting, filter))
+                .collect(Collectors.toList());
+    }
+
     /**
      * Adds a data generator to this data communicator. Data generators can be
      * used to insert custom data to the rows sent to the client. If the data
@@ -480,15 +481,15 @@ public class DataCommunicator<T> extends AbstractExtension {
     }
 
     /**
-     * Informs the DataProvider that the collection has changed.
+     * Method for internal reset from a change in the component, requiring a
+     * full data update.
      */
     public void reset() {
-        if (reset) {
-            return;
+        // Only needed if a full reset is not pending.
+        if (!reset) {
+            // Soft reset through client-side re-request.
+            getClientRpc().reset(getDataProviderSize());
         }
-
-        reset = true;
-        markAsDirty();
     }
 
     /**
@@ -641,7 +642,7 @@ public class DataCommunicator<T> extends AbstractExtension {
         if (isAttached()) {
             attachDataProviderListener();
         }
-        reset();
+        hardReset();
 
         return filter -> {
             if (this.dataProvider != dataProvider) {
@@ -650,12 +651,27 @@ public class DataCommunicator<T> extends AbstractExtension {
             }
 
             if (!Objects.equals(this.filter, filter)) {
-                this.filter = filter;
+                setFilter(filter);
                 reset();
             }
         };
     }
 
+    /**
+     * Sets the filter for this DataCommunicator. This method is used through
+     * the filter consumer returned by {@link #setDataProvider} and should not
+     * be called elsewhere.
+     * 
+     * @param filter
+     *            the filter
+     * 
+     * @param <F>
+     *            the filter type
+     */
+    protected <F> void setFilter(F filter) {
+        this.filter = filter;
+    }
+
     /**
      * Set minimum size of data which will be sent to the client when data
      * source is set.
@@ -693,6 +709,17 @@ public class DataCommunicator<T> extends AbstractExtension {
         return minPushSize;
     }
 
+    /**
+     * Getter method for finding the size of the DataProvider. Can be overridden by
+     * a subclass that uses a specific type of DataProvider and/or query.
+     * 
+     * @return the size of the data provider with the current filter
+     */
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    protected int getDataProviderSize() {
+        return getDataProvider().size(new Query(getFilter()));
+    }
+
     @Override
     protected DataCommunicatorState getState(boolean markAsDirty) {
         return (DataCommunicatorState) super.getState(markAsDirty);
@@ -713,12 +740,20 @@ public class DataCommunicator<T> extends AbstractExtension {
                             generators.forEach(g -> g.refreshData(item));
                             refresh(item);
                         } else {
-                            reset();
+                            hardReset();
                         }
                     });
                 });
     }
 
+    private void hardReset() {
+        if (reset) {
+            return;
+        }
+        reset = true;
+        markAsDirty();
+    }
+
     private void detachDataProviderListener() {
         if (dataProviderUpdateRegistration != null) {
             dataProviderUpdateRegistration.remove();
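
In DataCommunicator, reset() becomes a soft reset that simply tells the client the current size so it re-requests its rows, while the new private hardReset() marks the communicator dirty for a full push. Row fetching and size queries now go through the overridable fetchItemsWithRange and getDataProviderSize hooks, and the filter is applied via the protected setFilter. A minimal sketch, assuming a custom subclass, of how these extension points are meant to be overridden:

    import java.util.List;

    import com.vaadin.data.provider.DataCommunicator;

    // Illustrative subclass, not part of the commit; it only demonstrates
    // the new protected extension points.
    public class CountingDataCommunicator<T> extends DataCommunicator<T> {

        private int fetchCount;

        @Override
        protected int getDataProviderSize() {
            // Used for the initial client reset and by the new soft reset().
            return super.getDataProviderSize();
        }

        @Override
        protected List<T> fetchItemsWithRange(int offset, int limit) {
            // Called from sendDataToClient for the row range the client requested.
            fetchCount++;
            return super.fetchItemsWithRange(offset, limit);
        }

        @Override
        protected <F> void setFilter(F filter) {
            // Invoked through the filter consumer returned by setDataProvider.
            super.setFilter(filter);
        }
    }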
index 715538112e83f9c7aecfd0f36f35f2f4c3e51bf7..3e9eb01921d37c386fab81b4acd276bed579bc6f 100644 (file)
  */
 package com.vaadin.data.provider;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.Set;
-import java.util.function.BiConsumer;
-import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import com.vaadin.data.TreeData;
-import com.vaadin.data.provider.HierarchyMapper.TreeLevelQuery;
-import com.vaadin.data.provider.HierarchyMapper.TreeNode;
 import com.vaadin.server.SerializableConsumer;
 import com.vaadin.shared.Range;
-import com.vaadin.shared.data.HierarchicalDataCommunicatorConstants;
 import com.vaadin.shared.extension.datacommunicator.HierarchicalDataCommunicatorState;
 import com.vaadin.ui.ItemCollapseAllowedProvider;
 
-import elemental.json.Json;
-import elemental.json.JsonArray;
-import elemental.json.JsonObject;
-
 /**
  * Data communicator that handles requesting hierarchical data from
  * {@link HierarchicalDataProvider} and sending it to client side.
@@ -51,35 +39,20 @@ import elemental.json.JsonObject;
  */
 public class HierarchicalDataCommunicator<T> extends DataCommunicator<T> {
 
-    private static final Logger LOGGER = Logger
-            .getLogger(HierarchicalDataCommunicator.class.getName());
-
-    /**
-     * The amount of root level nodes to fetch and push to the client.
-     */
-    private static final int INITIAL_FETCH_SIZE = 100;
-
-    private HierarchyMapper mapper = new HierarchyMapper();
-
-    private Set<String> rowKeysPendingExpand = new HashSet<>();
+    private HierarchyMapper<T, ?> mapper;
 
     /**
      * Collapse allowed provider used to allow/disallow collapsing nodes.
      */
     private ItemCollapseAllowedProvider<T> itemCollapseAllowedProvider = t -> true;
 
-    /**
-     * The captured client side cache size.
-     */
-    private int latestCacheSize = INITIAL_FETCH_SIZE;
-
     /**
      * Construct a new hierarchical data communicator backed by a
      * {@link TreeDataProvider}.
      */
     public HierarchicalDataCommunicator() {
         super();
-        dataProvider = new TreeDataProvider<>(new TreeData<>());
+        setDataProvider(new TreeDataProvider<T>(new TreeData<>()), null);
     }
 
     @Override
@@ -93,225 +66,11 @@ public class HierarchicalDataCommunicator<T> extends DataCommunicator<T> {
     }
 
     @Override
-    protected void sendDataToClient(boolean initial) {
-        // on purpose do not call super
-        if (getDataProvider() == null) {
-            return;
-        }
-
-        if (initial || reset) {
-            loadInitialData();
-        } else {
-            loadRequestedRows();
-        }
-
-        if (!getUpdatedData().isEmpty()) {
-            JsonArray dataArray = Json.createArray();
-            int i = 0;
-            for (T data : getUpdatedData()) {
-                dataArray.set(i++, createDataObject(data, -1));
-            }
-            getClientRpc().updateData(dataArray);
-            getUpdatedData().clear();
-        }
-    }
-
-    private void loadInitialData() {
-        int rootSize = doSizeQuery(null);
-        mapper.reset(rootSize);
-
-        if (rootSize != 0) {
-            Range initialRange = getInitialRowsToPush(rootSize);
-            assert !initialRange
-                    .isEmpty() : "Initial range should never be empty.";
-            Stream<T> rootItems = doFetchQuery(initialRange.getStart(),
-                    initialRange.length(), null);
-
-            // for now just fetching data for the root level as everything is
-            // collapsed by default
-            List<T> items = rootItems.collect(Collectors.toList());
-            List<JsonObject> dataObjects = items.stream()
-                    .map(item -> createDataObject(item, 0))
-                    .collect(Collectors.toList());
-
-            getClientRpc().reset(rootSize);
-            sendData(0, dataObjects);
-            getActiveDataHandler().addActiveData(items.stream());
-            getActiveDataHandler().cleanUp(items.stream());
-        } else {
-            getClientRpc().reset(0);
-        }
-
-        setPushRows(Range.withLength(0, 0));
-        // any updated data is ignored at this point
-        getUpdatedData().clear();
-        reset = false;
-    }
-
-    private void loadRequestedRows() {
-        final Range requestedRows = getPushRows();
-        if (!requestedRows.isEmpty()) {
-            doPushRows(requestedRows, 0);
-        }
-
-        setPushRows(Range.withLength(0, 0));
-    }
-
-    /**
-     * Attempts to push the requested range of rows to the client. Will trigger
-     * a reset if the data provider is unable to provide the requested number of
-     * items.
-     *
-     * @param requestedRows
-     *            the range of rows to push
-     * @param insertRowsCount
-     *            number of rows to insert, beginning at the start index of
-     *            {@code requestedRows}, 0 to not insert any rows
-     * @return {@code true} if the range was successfully pushed to the client,
-     *         {@code false} if the push was unsuccessful and a reset was
-     *         triggered
-     */
-    private boolean doPushRows(final Range requestedRows, int insertRowsCount) {
-        Stream<TreeLevelQuery> levelQueries = mapper.splitRangeToLevelQueries(
-                requestedRows.getStart(), requestedRows.getEnd() - 1);
-
-        JsonObject[] dataObjects = new JsonObject[requestedRows.length()];
-        BiConsumer<JsonObject, Integer> rowDataMapper = (object,
-                index) -> dataObjects[index
-                        - requestedRows.getStart()] = object;
-        List<T> fetchedItems = new ArrayList<>(dataObjects.length);
-
-        levelQueries.forEach(query -> {
-            List<T> results = doFetchQuery(query.startIndex, query.size,
-                    getKeyMapper().get(query.node.getParentKey()))
-                            .collect(Collectors.toList());
-
-            fetchedItems.addAll(results);
-            List<JsonObject> rowData = results.stream()
-                    .map(item -> createDataObject(item, query.depth))
-                    .collect(Collectors.toList());
-            mapper.reorderLevelQueryResultsToFlatOrdering(rowDataMapper, query,
-                    rowData);
-        });
-
-        if (hasNullItems(dataObjects, requestedRows)) {
-            reset();
-            return false;
-        }
-
-        if (insertRowsCount > 0) {
-            getClientRpc().insertRows(requestedRows.getStart(),
-                    insertRowsCount);
-        }
-
-        sendData(requestedRows.getStart(), Arrays.asList(dataObjects));
-        getActiveDataHandler().addActiveData(fetchedItems.stream());
-        getActiveDataHandler().cleanUp(fetchedItems.stream());
-        return true;
-    }
-
-    private boolean hasNullItems(JsonObject[] dataObjects,
-            Range requestedRange) {
-        for (JsonObject object : dataObjects) {
-            if (object == null) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    private JsonObject createDataObject(T item, int depth) {
-        JsonObject dataObject = getDataObject(item);
-
-        JsonObject hierarchyData = Json.createObject();
-        if (depth != -1) {
-            hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_DEPTH,
-                    depth);
-        }
-
-        boolean isLeaf = !getDataProvider().hasChildren(item);
-        if (isLeaf) {
-            hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_LEAF,
-                    true);
-        } else {
-            String key = getKeyMapper().key(item);
-            hierarchyData.put(
-                    HierarchicalDataCommunicatorConstants.ROW_COLLAPSED,
-                    mapper.isCollapsed(key));
-            hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_LEAF,
-                    false);
-            hierarchyData.put(
-                    HierarchicalDataCommunicatorConstants.ROW_COLLAPSE_ALLOWED,
-                    itemCollapseAllowedProvider.test(item));
-        }
-
-        // add hierarchy information to row as metadata
-        dataObject.put(
-                HierarchicalDataCommunicatorConstants.ROW_HIERARCHY_DESCRIPTION,
-                hierarchyData);
-
-        return dataObject;
-    }
-
-    private void sendData(int startIndex, List<JsonObject> dataObjects) {
-        JsonArray dataArray = Json.createArray();
-        int i = 0;
-        for (JsonObject dataObject : dataObjects) {
-            dataArray.set(i++, dataObject);
-        }
-
-        getClientRpc().setData(startIndex, dataArray);
-    }
-
-    /**
-     * Returns the range of rows to push on initial response.
-     *
-     * @param rootLevelSize
-     *            the amount of rows on the root level
-     * @return the range of rows to push initially
-     */
-    private Range getInitialRowsToPush(int rootLevelSize) {
-        // TODO optimize initial level to avoid unnecessary requests
-        return Range.between(0, Math.min(rootLevelSize, latestCacheSize));
-    }
-
-    @SuppressWarnings({ "rawtypes", "unchecked" })
-    private Stream<T> doFetchQuery(int start, int length, T parentItem) {
-        return getDataProvider()
-                .fetch(new HierarchicalQuery(start, length, getBackEndSorting(),
-                        getInMemorySorting(), getFilter(), parentItem));
-    }
-
-    @SuppressWarnings({ "unchecked", "rawtypes" })
-    private int doSizeQuery(T parentItem) {
-        return getDataProvider()
-                .getChildCount(new HierarchicalQuery(getFilter(), parentItem));
-    }
-
-    @Override
-    protected void onRequestRows(int firstRowIndex, int numberOfRows,
-            int firstCachedRowIndex, int cacheSize) {
-        super.onRequestRows(firstRowIndex, numberOfRows, firstCachedRowIndex,
-                cacheSize);
-    }
-
-    @Override
-    protected void onDropRows(JsonArray keys) {
-        for (int i = 0; i < keys.length(); i++) {
-            // cannot drop keys of expanded rows, parents of expanded rows or
-            // rows that are pending expand
-            String itemKey = keys.getString(i);
-            if (!mapper.isKeyStored(itemKey)
-                    && !rowKeysPendingExpand.contains(itemKey)) {
-                getActiveDataHandler().dropActiveData(itemKey);
-            }
-        }
-    }
-
-    @Override
-    protected void dropAllData() {
-        super.dropAllData();
-        rowKeysPendingExpand.clear();
+    protected List<T> fetchItemsWithRange(int offset, int limit) {
+        // Instead of adding logic to this class, delegate request to the
+        // separate object handling hierarchies.
+        return mapper.fetchItems(Range.withLength(offset, limit))
+                .collect(Collectors.toList());
     }
 
     @Override
@@ -335,7 +94,25 @@ public class HierarchicalDataCommunicator<T> extends DataCommunicator<T> {
      */
     public <F> SerializableConsumer<F> setDataProvider(
             HierarchicalDataProvider<T, F> dataProvider, F initialFilter) {
-        return super.setDataProvider(dataProvider, initialFilter);
+        SerializableConsumer<F> consumer = super.setDataProvider(dataProvider,
+                initialFilter);
+
+        // Remove old mapper
+        if (mapper != null) {
+            removeDataGenerator(mapper);
+        }
+        mapper = new HierarchyMapper<>(dataProvider);
+
+        // Set up mapper for requests
+        mapper.setBackEndSorting(getBackEndSorting());
+        mapper.setInMemorySorting(getInMemorySorting());
+        mapper.setFilter(getFilter());
+        mapper.setItemCollapseAllowedProvider(getItemCollapseAllowedProvider());
+
+        // Provide hierarchy data to json
+        addDataGenerator(mapper);
+
+        return consumer;
     }
 
     /**
@@ -357,7 +134,9 @@ public class HierarchicalDataCommunicator<T> extends DataCommunicator<T> {
     public <F> SerializableConsumer<F> setDataProvider(
             DataProvider<T, F> dataProvider, F initialFilter) {
         if (dataProvider instanceof HierarchicalDataProvider) {
-            return super.setDataProvider(dataProvider, initialFilter);
+            return setDataProvider(
+                    (HierarchicalDataProvider<T, F>) dataProvider,
+                    initialFilter);
         }
         throw new IllegalArgumentException(
                 "Only " + HierarchicalDataProvider.class.getName()
@@ -365,153 +144,100 @@ public class HierarchicalDataCommunicator<T> extends DataCommunicator<T> {
     }
 
     /**
-     * Collapses given row, removing all its subtrees. Calling this method will
+     * Collapses the given item, removing all its subtrees. Calling this method will
      * have no effect if the row is already collapsed.
      *
-     * @param collapsedRowKey
-     *            the key of the row, not {@code null}
-     * @param collapsedRowIndex
-     *            the index of row to collapse
-     * @return {@code true} if the row was collapsed, {@code false} otherwise
+     * @param item
+     *            the item to collapse
      */
-    public boolean doCollapse(String collapsedRowKey, int collapsedRowIndex) {
-        if (collapsedRowIndex < 0 | collapsedRowIndex >= mapper.getTreeSize()) {
-            throw new IllegalArgumentException("Invalid row index "
-                    + collapsedRowIndex + " when tree grid size of "
-                    + mapper.getTreeSize());
-        }
-        Objects.requireNonNull(collapsedRowKey, "Row key cannot be null");
-        T collapsedItem = getKeyMapper().get(collapsedRowKey);
-        Objects.requireNonNull(collapsedItem,
-                "Cannot find item for given key " + collapsedItem);
-
-        if (mapper.isCollapsed(collapsedRowKey)) {
-            return false;
+    public void collapse(T item) {
+        if (mapper.isExpanded(item)) {
+            doCollapse(item, mapper.getIndexOf(item));
         }
-        int collapsedSubTreeSize = mapper.collapse(collapsedRowKey,
-                collapsedRowIndex);
-        getClientRpc().removeRows(collapsedRowIndex + 1, collapsedSubTreeSize);
-
-        // FIXME seems like a slight overkill to do this just for refreshing
-        // expanded status
-        refresh(collapsedItem);
-        return true;
     }
 
     /**
-     * Expands the given row. Calling this method will have no effect if the row
-     * is already expanded.
+     * Collapses the given item, removing all its subtrees. Calling this method
+     * will have no effect if the item is already collapsed. The index is
+     * provided by the client side or calculated from a full data request.
+     *
+     * @see #collapse(Object)
      *
-     * @param expandedRowKey
-     *            the key of the row, not {@code null}
-     * @param expandedRowIndex
-     *            the index of the row to expand
-     * @param userOriginated
-     *            whether this expand was originated from the server or client
-     * @return {@code true} if the row was expanded, {@code false} otherwise
+     * @param item
+     *            the item to collapse
+     * @param index
+     *            the index of the item
      */
-    public boolean doExpand(String expandedRowKey, final int expandedRowIndex,
-            boolean userOriginated) {
-        if (!userOriginated && !rowKeysPendingExpand.contains(expandedRowKey)) {
-            return false;
-        }
-        if (expandedRowIndex < 0 | expandedRowIndex >= mapper.getTreeSize()) {
-            throw new IllegalArgumentException("Invalid row index "
-                    + expandedRowIndex + " when tree grid size of "
-                    + mapper.getTreeSize());
-        }
-        Objects.requireNonNull(expandedRowKey, "Row key cannot be null");
-        final T expandedItem = getKeyMapper().get(expandedRowKey);
-        Objects.requireNonNull(expandedItem,
-                "Cannot find item for given key " + expandedRowKey);
-
-        int expandedNodeSize = doSizeQuery(expandedItem);
-        if (expandedNodeSize == 0) {
-            reset();
-            return false;
-        }
-
-        if (!mapper.isCollapsed(expandedRowKey)) {
-            return false;
-        }
-        expandedNodeSize = mapper.expand(expandedRowKey, expandedRowIndex,
-                expandedNodeSize);
-        rowKeysPendingExpand.remove(expandedRowKey);
-
-        boolean success = doPushRows(
-                Range.withLength(expandedRowIndex + 1,
-                        Math.min(expandedNodeSize, latestCacheSize)),
-                expandedNodeSize);
-
-        if (success) {
-            // FIXME seems like a slight overkill to do this just for refreshing
-            // expanded status
-            refresh(expandedItem);
-            return true;
+    public void doCollapse(T item, Optional<Integer> index) {
+        if (mapper.isExpanded(item)) {
+            Range removedRows = mapper.doCollapse(item, index);
+            if (!reset && !removedRows.isEmpty()) {
+                getClientRpc().removeRows(removedRows.getStart(),
+                        removedRows.length());
+            }
+            refresh(item);
         }
-        return false;
     }
 
     /**
-     * Set an item as pending expansion.
-     * <p>
-     * Calling this method reserves a communication key for the item that is
-     * guaranteed to not be invalidated until the item is expanded. Has no
-     * effect and returns an empty optional if the given item is already
-     * expanded or has no children.
+     * Expands the given item. Calling this method will have no effect if the
+     * item is already expanded.
      *
      * @param item
-     *            the item to set as pending expansion
-     * @return an optional of the communication key used for the item, empty if
-     *         the item cannot be expanded
+     *            the item to expand
      */
-    public Optional<String> setPendingExpand(T item) {
-        Objects.requireNonNull(item, "Item cannot be null");
-        if (getKeyMapper().has(item)
-                && !mapper.isCollapsed(getKeyMapper().key(item))) {
-            // item is already expanded
-            return Optional.empty();
+    public void expand(T item) {
+        if (!mapper.isExpanded(item) && mapper.hasChildren(item)) {
+            doExpand(item, mapper.getIndexOf(item));
         }
-        if (!getDataProvider().hasChildren(item)) {
-            // ignore item with no children
-            return Optional.empty();
-        }
-        String key = getKeyMapper().key(item);
-        rowKeysPendingExpand.add(key);
-        return Optional.of(key);
     }
 
     /**
-     * Collapse an item.
-     * <p>
-     * This method will either collapse an item directly, or remove its pending
-     * expand status. If the item is not expanded or pending expansion, calling
-     * this method has no effect.
+     * Expands the given item at the given index. Calling this method will have
+     * no effect if the item is already expanded. The index is provided by the
+     * client side or calculated from a full data request.
+     *
+     * @see #expand(Object)
      *
      * @param item
-     *            the item to collapse
-     * @return an optional of the communication key used for the item, empty if
-     *         the item cannot be collapsed
+     *            the item to expand
+     * @param index
+     *            the index of the item
      */
-    public Optional<String> collapseItem(T item) {
-        Objects.requireNonNull(item, "Item cannot be null");
-        if (!getKeyMapper().has(item)) {
-            // keymapper should always have items that are expanded or pending
-            // expand
-            return Optional.empty();
-        }
-        String nodeKey = getKeyMapper().key(item);
-        Optional<TreeNode> node = mapper.getNodeForKey(nodeKey);
-        if (node.isPresent()) {
-            rowKeysPendingExpand.remove(nodeKey);
-            doCollapse(nodeKey, node.get().getStartIndex() - 1);
-            return Optional.of(nodeKey);
-        }
-        if (rowKeysPendingExpand.contains(nodeKey)) {
-            rowKeysPendingExpand.remove(nodeKey);
-            return Optional.of(nodeKey);
+    public void doExpand(T item, Optional<Integer> index) {
+        if (!mapper.isExpanded(item)) {
+            Range addedRows = mapper.doExpand(item, index);
+            if (!reset && !addedRows.isEmpty()) {
+                int start = addedRows.getStart();
+                getClientRpc().insertRows(start, addedRows.length());
+                Stream<T> children = mapper.fetchItems(item,
+                        Range.withLength(0, addedRows.length()));
+                pushData(start, children.collect(Collectors.toList()));
+            }
+            refresh(item);
         }
-        return Optional.empty();
+    }
+
+    /**
+     * Returns whether given item has children.
+     * 
+     * @param item
+     *            the item to test
+     * @return {@code true} if item has children; {@code false} if not
+     */
+    public boolean hasChildren(T item) {
+        return mapper.hasChildren(item);
+    }
+
+    /**
+     * Returns whether given item is expanded.
+     * 
+     * @param item
+     *            the item to test
+     * @return {@code true} if item is expanded; {@code false} if not
+     */
+    public boolean isExpanded(T item) {
+        return mapper.isExpanded(item);
     }
 
     /**
@@ -529,19 +255,21 @@ public class HierarchicalDataCommunicator<T> extends DataCommunicator<T> {
             ItemCollapseAllowedProvider<T> provider) {
         Objects.requireNonNull(provider, "Provider can't be null");
         itemCollapseAllowedProvider = provider;
+        // Update hierarchy mapper
+        mapper.setItemCollapseAllowedProvider(provider);
 
         getActiveDataHandler().getActiveData().values().forEach(this::refresh);
     }
 
     /**
-     * Returns parent index for the row or {@code null}
+     * Returns the parent index for the item or {@code null}.
      *
-     * @param rowIndex
-     *            the row index
+     * @param item
+     *            the item to find the parent of
      * @return the parent index or {@code null} for top-level items
      */
-    public Integer getParentIndex(int rowIndex) {
-        return mapper.getParentIndex(rowIndex);
+    public Integer getParentIndex(T item) {
+        return mapper.getParentIndex(item);
     }
 
     /**
@@ -552,4 +280,33 @@ public class HierarchicalDataCommunicator<T> extends DataCommunicator<T> {
     public ItemCollapseAllowedProvider<T> getItemCollapseAllowedProvider() {
         return itemCollapseAllowedProvider;
     }
+
+    @Override
+    protected int getDataProviderSize() {
+        return mapper.getTreeSize();
+    }
+
+    @Override
+    public void setBackEndSorting(List<QuerySortOrder> sortOrder) {
+        if (mapper != null) {
+            mapper.setBackEndSorting(sortOrder);
+        }
+        super.setBackEndSorting(sortOrder);
+    }
+
+    @Override
+    public void setInMemorySorting(Comparator<T> comparator) {
+        if (mapper != null) {
+            mapper.setInMemorySorting(comparator);
+        }
+        super.setInMemorySorting(comparator);
+    }
+
+    @Override
+    protected <F> void setFilter(F filter) {
+        if (mapper != null) {
+            mapper.setFilter(filter);
+        }
+        super.setFilter(filter);
+    }
 }
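
Expanding and collapsing are now item-based on the server side: expand(item) and collapse(item) resolve the flat index through the HierarchyMapper, insert or remove the affected row range on the client, and refresh the item so its collapsed state is regenerated. A small usage sketch against the new public API (the helper class is an assumption, not part of the commit):

    import com.vaadin.data.provider.HierarchicalDataCommunicator;

    // Illustrative helper, not part of the commit, showing the new
    // item-based expand/collapse API from the server side.
    final class TreeToggleHelper {

        static <T> void toggle(HierarchicalDataCommunicator<T> communicator,
                T item) {
            if (!communicator.hasChildren(item)) {
                // Leaf nodes have nothing to expand or collapse.
                return;
            }
            if (communicator.isExpanded(item)) {
                // Collapses the item; the subtree rows are removed on the client.
                communicator.collapse(item);
            } else {
                // Expands the item; the child rows are inserted and pushed.
                communicator.expand(item);
            }
        }
    }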
index ab080aa450f34de8f4a6830cbb3fb1f475dd8156..ebed464fdee8c11fee35bd3cbc7f237c77ea2e64 100644 (file)
  */
 package com.vaadin.data.provider;
 
-import java.io.Serializable;
-import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.function.BiConsumer;
-import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
+import com.vaadin.shared.Range;
+import com.vaadin.shared.data.HierarchicalDataCommunicatorConstants;
+import com.vaadin.ui.ItemCollapseAllowedProvider;
+
+import elemental.json.Json;
+import elemental.json.JsonObject;
+
 /**
  * Mapper for hierarchical data.
  * <p>
@@ -43,484 +48,465 @@ import java.util.stream.Stream;
  *
  * @author Vaadin Ltd
  * @since 8.1
+ * 
+ * @param <T>
+ *            the data type
+ * @param <F>
+ *            the filter type
  */
-class HierarchyMapper implements Serializable {
+public class HierarchyMapper<T, F> implements DataGenerator<T> {
+
+    // childMap is only used for finding parents of items and for cleaning up
+    // when removing children of expanded nodes.
+    private Map<T, Set<T>> childMap = new HashMap<>();
 
-    private static final Logger LOGGER = Logger
-            .getLogger(HierarchyMapper.class.getName());
+    private final HierarchicalDataProvider<T, F> provider;
+    private F filter;
+    private List<QuerySortOrder> backEndSorting;
+    private Comparator<T> inMemorySorting;
+    private ItemCollapseAllowedProvider<T> itemCollapseAllowedProvider = t -> true;
+
+    private Set<Object> expandedItemIds = new HashSet<>();
 
     /**
-     * A POJO that represents a query data for a certain tree level.
+     * Constructs a new HierarchyMapper.
+     * 
+     * @param provider
+     *            the hierarchical data provider for this mapper
      */
-    static class TreeLevelQuery { // not serializable since not stored
-        /**
-         * The tree node that the query is for. Only used for fetching parent
-         * key.
-         */
-        final TreeNode node;
-        /** The start index of the query, from 0 to level's size - 1. */
-        final int startIndex;
-        /** The number of rows to fetch. s */
-        final int size;
-        /** The depth of this node. */
-        final int depth;
-        /** The first row index in grid, including all the nodes. */
-        final int firstRowIndex;
-        /** The direct subtrees for the node that effect the indexing. */
-        final List<TreeNode> subTrees;
-
-        TreeLevelQuery(TreeNode node, int startIndex, int size, int depth,
-                int firstRowIndex, List<TreeNode> subTrees) {
-            this.node = node;
-            this.startIndex = startIndex;
-            this.size = size;
-            this.depth = depth;
-            this.firstRowIndex = firstRowIndex;
-            this.subTrees = subTrees;
-        }
+    public HierarchyMapper(HierarchicalDataProvider<T, F> provider) {
+        this.provider = provider;
     }
 
     /**
-     * A level in the tree, either the root level or an expanded subtree level.
-     * <p>
-     * Comparable based on the {@link #startIndex}, which is flat from 0 to data
-     * size - 1.
+     * Returns the size of the currently expanded hierarchy.
+     * 
+     * @return the amount of available data
      */
-    static class TreeNode implements Serializable, Comparable<TreeNode> {
-
-        /** The key for the expanded item that this is a subtree of. */
-        private final String parentKey;
-        /** The first index on this level. */
-        private int startIndex;
-        /** The last index on this level, INCLUDING subtrees. */
-        private int endIndex;
-
-        TreeNode(String parentKey, int startIndex, int size) {
-            this.parentKey = parentKey;
-            this.startIndex = startIndex;
-            endIndex = startIndex + size - 1;
-        }
+    public int getTreeSize() {
+        return (int) getHierarchy(null).count();
+    }
 
-        TreeNode(int startIndex) {
-            parentKey = "INVALID";
-            this.startIndex = startIndex;
-        }
+    /**
+     * Finds the index of the parent of the given item in the flat hierarchy.
+     * 
+     * @param item
+     *            the item to get the parent of
+     * @return the parent index
+     * 
+     * @throws IllegalArgumentException
+     *             if the given item is not found
+     */
+    public Integer getParentIndex(T item) throws IllegalArgumentException {
+        // TODO: This can be optimised.
+        List<T> flatHierarchy = getHierarchy(null).collect(Collectors.toList());
+        return flatHierarchy.indexOf(getParentOfItem(item));
+    }
 
-        int getStartIndex() {
-            return startIndex;
+    /**
+     * Returns whether the given item is expanded.
+     * 
+     * @param item
+     *            the item to test
+     * @return {@code true} if item is expanded; {@code false} if not
+     */
+    public boolean isExpanded(T item) {
+        if (item == null) {
+            // Root nodes are always visible.
+            return true;
         }
+        return expandedItemIds.contains(getDataProvider().getId(item));
+    }
 
-        int getEndIndex() {
-            return endIndex;
+    /**
+     * Expands the given item.
+     * 
+     * @param item
+     *            the item to expand
+     * @param position
+     *            the index of item
+     * @return range of rows added by expanding the item
+     */
+    public Range doExpand(T item, Optional<Integer> position) {
+        Range rows = Range.withLength(0, 0);
+        if (!isExpanded(item) && hasChildren(item)) {
+            Object id = getDataProvider().getId(item);
+            expandedItemIds.add(id);
+            if (position.isPresent()) {
+                rows = Range.withLength(position.get() + 1,
+                        (int) getHierarchy(item, false).count());
+            }
         }
+        return rows;
+    }
 
-        String getParentKey() {
-            return parentKey;
+    /**
+     * Collapses the given item.
+     * 
+     * @param item
+     *            the item to collapse
+     * @param position
+     *            the index of item
+     * 
+     * @return range of rows removed by collapsing the item
+     */
+    public Range doCollapse(T item, Optional<Integer> position) {
+        Range removedRows = Range.withLength(0, 0);
+        if (isExpanded(item)) {
+            Object id = getDataProvider().getId(item);
+            if (position.isPresent()) {
+                long childCount = getHierarchy(item, false).count();
+                removedRows = Range.withLength(position.get() + 1,
+                        (int) childCount);
+            }
+            expandedItemIds.remove(id);
         }
+        return removedRows;
+    }
 
-        private void push(int offset) {
-            startIndex += offset;
-            endIndex += offset;
-        }
+    @Override
+    public void generateData(T item, JsonObject jsonObject) {
+        JsonObject hierarchyData = Json.createObject();
 
-        private void pushEnd(int offset) {
-            endIndex += offset;
+        int depth = getDepth(item);
+        if (depth >= 0) {
+            hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_DEPTH,
+                    depth);
         }
 
-        @Override
-        public int compareTo(TreeNode other) {
-            return Integer.valueOf(startIndex).compareTo(other.startIndex);
+        boolean isLeaf = !getDataProvider().hasChildren(item);
+        if (isLeaf) {
+            hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_LEAF,
+                    true);
+        } else {
+            hierarchyData.put(
+                    HierarchicalDataCommunicatorConstants.ROW_COLLAPSED,
+                    !isExpanded(item));
+            hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_LEAF,
+                    false);
+            hierarchyData.put(
+                    HierarchicalDataCommunicatorConstants.ROW_COLLAPSE_ALLOWED,
+                    getItemCollapseAllowedProvider().test(item));
         }
 
-        @Override
-        public String toString() {
-            return "TreeNode [parent=" + parentKey + ", start=" + startIndex
-                    + ", end=" + getEndIndex() + "]";
-        }
+        // add hierarchy information to row as metadata
+        jsonObject.put(
+                HierarchicalDataCommunicatorConstants.ROW_HIERARCHY_DESCRIPTION,
+                hierarchyData);
+    }
 
+    /**
+     * Gets the current item collapse allowed provider.
+     * 
+     * @return the item collapse allowed provider
+     */
+    public ItemCollapseAllowedProvider<T> getItemCollapseAllowedProvider() {
+        return itemCollapseAllowedProvider;
     }
 
-    /** The expanded nodes in the tree. */
-    private final TreeSet<TreeNode> nodes = new TreeSet<>();
+    /**
+     * Sets the current item collapse allowed provider.
+     * 
+     * @param itemCollapseAllowedProvider
+     *            the item collapse allowed provider
+     */
+    public void setItemCollapseAllowedProvider(
+            ItemCollapseAllowedProvider<T> itemCollapseAllowedProvider) {
+        this.itemCollapseAllowedProvider = itemCollapseAllowedProvider;
+    }
 
     /**
-     * Map of collapsed subtrees. The keys of this map are the collapsed
-     * subtrees parent keys and values are the {@code TreeSet}s of the subtree's
-     * expanded {@code TreeNode}s.
+     * Gets the current in-memory sorting.
+     * 
+     * @return the in-memory sorting
      */
-    private final Map<String, TreeSet<TreeNode>> collapsedNodes = new HashMap<>();
+    public Comparator<T> getInMemorySorting() {
+        return inMemorySorting;
+    }
 
     /**
-     * Resets the tree, sets given the root level size.
-     *
-     * @param rootLevelSize
-     *            the number of items in the root level
+     * Sets the current in-memory sorting. This will cause the hierarchy to be
+     * constructed again.
+     * 
+     * @param inMemorySorting
+     *            the in-memory sorting
      */
-    public void reset(int rootLevelSize) {
-        collapsedNodes.clear();
-        nodes.clear();
-        nodes.add(new TreeNode(null, 0, rootLevelSize));
+    public void setInMemorySorting(Comparator<T> inMemorySorting) {
+        this.inMemorySorting = inMemorySorting;
     }
 
     /**
-     * Returns the complete size of the tree, including all expanded subtrees.
-     *
-     * @return the size of the tree
+     * Gets the current back-end sorting.
+     * 
+     * @return the back-end sorting
      */
-    public int getTreeSize() {
-        TreeNode rootNode = getNodeForKey(null)
-                .orElse(new TreeNode(null, 0, 0));
-        return rootNode.endIndex + 1;
+    public List<QuerySortOrder> getBackEndSorting() {
+        return backEndSorting;
     }
 
     /**
-     * Returns whether the node with the given is collapsed or not.
-     *
-     * @param itemKey
-     *            the key of node to check
-     * @return {@code true} if collapsed, {@code false} if expanded
+     * Sets the current back-end sorting. This will cause the hierarchy to be
+     * constructed again.
+     * 
+     * @param backEndSorting
+     *            the back-end sorting
      */
-    public boolean isCollapsed(String itemKey) {
-        return !getNodeForKey(itemKey).isPresent();
+    public void setBackEndSorting(List<QuerySortOrder> backEndSorting) {
+        this.backEndSorting = backEndSorting;
     }
 
     /**
-     * Return whether the given item key is still being used in this mapper.
-     *
-     * @param itemKey
-     *            the item key to look for
-     * @return {@code true} if the item key is still used, {@code false}
-     *         otherwise
+     * Gets the current filter.
+     * 
+     * @return the filter
      */
-    public boolean isKeyStored(String itemKey) {
-        if (getNodeForKey(itemKey).isPresent()) {
-            return true;
-        }
-        // Is the key used in a collapsed subtree?
-        for (Entry<String, TreeSet<TreeNode>> entry : collapsedNodes.entrySet()) {
-            if (entry.getKey() != null && entry.getKey().equals(itemKey)) {
-                return true;
-            }
-            for (TreeNode subTreeNode : entry.getValue()) {
-                if (subTreeNode.getParentKey() != null
-                        && subTreeNode.getParentKey().equals(itemKey)) {
-                    return true;
-                }
-            }
-        }
-        return false;
+    public F getFilter() {
+        return filter;
     }
 
     /**
-     * Return the depth of expanded node's subtree.
-     * <p>
-     * The root node depth is 0.
-     *
-     * @param expandedNodeKey
-     *            the item key of the expanded node
-     * @return the depth of the expanded node
-     * @throws IllegalArgumentException
-     *             if the node was not expanded
+     * Sets the current filter. This will cause the hierarchy to be constructed
+     * again.
+     * 
+     * @param filter
+     *            the filter
      */
-    protected int getDepth(String expandedNodeKey) {
-        Optional<TreeNode> node = getNodeForKey(expandedNodeKey);
-        if (!node.isPresent()) {
-            throw new IllegalArgumentException("No node with given key "
-                    + expandedNodeKey + " was expanded.");
-        }
-        TreeNode treeNode = node.get();
-        AtomicInteger start = new AtomicInteger(treeNode.startIndex);
-        AtomicInteger end = new AtomicInteger(treeNode.getEndIndex());
-        AtomicInteger depth = new AtomicInteger();
-        nodes.headSet(treeNode, false).descendingSet().forEach(higherNode -> {
-            if (higherNode.startIndex < start.get()
-                    && higherNode.getEndIndex() >= end.get()) {
-                start.set(higherNode.startIndex);
-                depth.incrementAndGet();
-            }
-        });
+    public void setFilter(Object filter) {
+        this.filter = (F) filter;
+    }
 
-        return depth.get();
+    /**
+     * Gets the {@code HierarchicalDataProvider} for this
+     * {@code HierarchyMapper}.
+     * 
+     * @return the hierarchical data provider
+     */
+    public HierarchicalDataProvider<T, F> getDataProvider() {
+        return provider;
     }
 
     /**
-     * Returns the tree node for the given expanded item key, or an empty
-     * optional if the item was not expanded.
-     *
-     * @param expandedNodeKey
-     *            the key of the item
-     * @return the tree node for the expanded item, or an empty optional if not
-     *         expanded
+     * Returns whether given item has children.
+     * 
+     * @param item
+     *            the node to test
+     * @return {@code true} if node has children; {@code false} if not
      */
-    protected Optional<TreeNode> getNodeForKey(String expandedNodeKey) {
-        return nodes.stream()
-                .filter(node -> Objects.equals(node.parentKey, expandedNodeKey))
-                .findAny();
+    public boolean hasChildren(T item) {
+        return getDataProvider().hasChildren(item);
     }
 
+    /* Fetch methods. These are used to calculate what to request. */
+
     /**
-     * Expands the node in the given index and with the given key.
-     *
-     * @param expandedRowKey
-     *            the key of the expanded item
-     * @param expandedRowIndex
-     *            the index of the expanded item
-     * @param expandedNodeSize
-     *            the size of the subtree of the expanded node, used if
-     *            previously unknown
-     * @throws IllegalStateException
-     *             if the node was expanded already
-     * @return the actual size of the expand
+     * Gets a stream of items in the form of a flattened hierarchy from the
+     * back-end and filters the wanted results from the list.
+     * 
+     * @param range
+     *            the requested item range
+     * @return the stream of items
      */
-    protected int expand(String expandedRowKey, int expandedRowIndex,
-            int expandedNodeSize) {
-        if (expandedNodeSize < 1) {
-            throw new IllegalArgumentException(
-                    "The expanded node's size cannot be less than 1, was "
-                            + expandedNodeSize);
-        }
-        TreeNode newNode;
-        TreeSet<TreeNode> subTree = null;
-        if (collapsedNodes.containsKey(expandedRowKey)) {
-            subTree = collapsedNodes.remove(expandedRowKey);
-            newNode = subTree.first();
-            int offset = expandedRowIndex - newNode.getStartIndex() + 1;
-            subTree.forEach(node -> node.push(offset));
-            expandedNodeSize = newNode.getEndIndex() - newNode.getStartIndex()
-                    + 1;
-        } else {
-            newNode = new TreeNode(expandedRowKey, expandedRowIndex + 1,
-                    expandedNodeSize);
-        }
+    public Stream<T> fetchItems(Range range) {
+        return getHierarchy(null).skip(range.getStart()).limit(range.length());
+    }
 
-        boolean added = nodes.add(newNode);
-        if (!added) {
-            throw new IllegalStateException("Node in index " + expandedRowIndex
-                    + " was expanded already.");
-        }
+    /**
+     * Gets a stream of children for the given item in the form of a flattened
+     * hierarchy from the back-end and filters the wanted results from the list.
+     * 
+     * @param parent
+     *            the parent item for the fetch
+     * @param range
+     *            the requested item range
+     * @return the stream of items
+     */
+    public Stream<T> fetchItems(T parent, Range range) {
+        return getHierarchy(parent, false).skip(range.getStart())
+                .limit(range.length());
+    }
 
-        // push end indexes for parent nodes
-        final int expandSize = expandedNodeSize;
-        List<TreeNode> updated = nodes.headSet(newNode, false).stream()
-                .filter(node -> node.getEndIndex() >= expandedRowIndex)
-                .collect(Collectors.toList());
-        nodes.removeAll(updated);
-        updated.stream().forEach(node -> node.pushEnd(expandSize));
-        nodes.addAll(updated);
+    /* Methods for providing information on the hierarchy. */
 
-        // push start and end indexes for later nodes
-        updated = nodes.tailSet(newNode, false).stream()
-                .collect(Collectors.toList());
-        nodes.removeAll(updated);
-        updated.stream().forEach(node -> node.push(expandSize));
-        nodes.addAll(updated);
+    /**
+     * Generic method for finding direct children of a given parent, limited by
+     * given range.
+     * 
+     * @param parent
+     *            the parent
+     * @param range
+     *            the range of direct children to return
+     * @return the requested children of the given parent
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    private Stream<T> doFetchDirectChildren(T parent, Range range) {
+        return getDataProvider().fetchChildren(new HierarchicalQuery(
+                range.getStart(), range.length(), getBackEndSorting(),
+                getInMemorySorting(), getFilter(), parent));
+    }
 
-        if (subTree != null) {
-            nodes.addAll(subTree);
+    private int getDepth(T item) {
+        int depth = -1;
+        while (item != null) {
+            item = getParentOfItem(item);
+            ++depth;
         }
+        return depth;
+    }
 
-        return expandSize;
+    private T getParentOfItem(T item) {
+        Objects.requireNonNull(item, "Can not find the parent of null");
+        Object itemId = getDataProvider().getId(item);
+        for (Entry<T, Set<T>> entry : childMap.entrySet()) {
+            if (entry.getValue().stream().map(getDataProvider()::getId)
+                    .anyMatch(id -> id.equals(itemId))) {
+                return entry.getKey();
+            }
+        }
+        return null;
     }
 
     /**
-     * Collapses the node in the given index.
-     *
-     * @param key
-     *            the key of the collapsed item
-     * @param collapsedRowIndex
-     *            the index of the collapsed item
-     * @return the size of the complete subtree that was collapsed
-     * @throws IllegalStateException
-     *             if the node was not collapsed, or if the given key is not the
-     *             same as it was when the node has been expanded
+     * Removes all children of an item identified by a given id. Items removed
+     * by this method as well as the original item are all marked to be
+     * collapsed.
+     * 
+     * @param id
+     *            the item id
      */
-    protected int collapse(String key, int collapsedRowIndex) {
-        Objects.requireNonNull(key,
-                "The key for the item to collapse cannot be null.");
-        TreeNode collapsedNode = nodes
-                .ceiling(new TreeNode(collapsedRowIndex + 1));
-        if (collapsedNode == null
-                || collapsedNode.startIndex != collapsedRowIndex + 1) {
-            throw new IllegalStateException(
-                    "Could not find expanded node for index "
-                            + collapsedRowIndex + ", node was not collapsed");
-        }
-        if (!Objects.equals(key, collapsedNode.parentKey)) {
-            throw new IllegalStateException("The expected parent key " + key
-                    + " is different for the collapsed node " + collapsedNode);
+    private void removeChildren(Object id) {
+        // Clean up removed nodes from child map
+        Iterator<Entry<T, Set<T>>> iterator = childMap.entrySet().iterator();
+        Set<T> invalidatedChildren = new HashSet<>();
+        while (iterator.hasNext()) {
+            Entry<T, Set<T>> entry = iterator.next();
+            T key = entry.getKey();
+            if (key != null && getDataProvider().getId(key).equals(id)) {
+                invalidatedChildren.addAll(entry.getValue());
+                iterator.remove();
+            }
         }
+        expandedItemIds.remove(id);
+        invalidatedChildren.stream().map(getDataProvider()::getId)
+                .forEach(this::removeChildren);
+    }
 
-        Set<TreeNode> subTreeNodes = nodes
-                .tailSet(collapsedNode).stream().filter(node -> collapsedNode
-                        .getEndIndex() >= node.getEndIndex())
-                .collect(Collectors.toSet());
-        collapsedNodes.put(collapsedNode.getParentKey(), new TreeSet<>(subTreeNodes));
-
-        // remove complete subtree
-        AtomicInteger removedSubTreeSize = new AtomicInteger(
-                collapsedNode.getEndIndex() - collapsedNode.startIndex + 1);
-        nodes.tailSet(collapsedNode, false).removeIf(
-                node -> node.startIndex <= collapsedNode.getEndIndex());
-
-        final int offset = -1 * removedSubTreeSize.get();
-        // adjust parent end indexes
-        List<TreeNode> updated = nodes.headSet(collapsedNode, false).stream()
-                .filter(node -> node.getEndIndex() >= collapsedRowIndex)
-                .collect(Collectors.toList());
-        nodes.removeAll(updated);
-        updated.stream().forEach(node -> node.pushEnd(offset));
-        nodes.addAll(updated);
+    /**
+     * Finds the current index of the given object. This is based on a search
+     * in the flattened version of the hierarchy.
+     * 
+     * @param target
+     *            the target object to find
+     * @return optional index of given object
+     */
+    public Optional<Integer> getIndexOf(T target) {
+        if (target == null) {
+            return Optional.empty();
+        }
 
-        // adjust start and end indexes for latter nodes
-        updated = nodes.tailSet(collapsedNode, false).stream()
+        final List<Object> collect = getHierarchy(null).map(provider::getId)
                 .collect(Collectors.toList());
-        nodes.removeAll(updated);
-        updated.stream().forEach(node -> node.push(offset));
-        nodes.addAll(updated);
+        int index = collect.indexOf(getDataProvider().getId(target));
+        return Optional.ofNullable(index < 0 ? null : index);
+    }
 
-        nodes.remove(collapsedNode);
+    /**
+     * Gets the full hierarchy tree starting from given node.
+     * 
+     * @param parent
+     *            the parent node to start from
+     * @return the flattened hierarchy as a stream
+     */
+    private Stream<T> getHierarchy(T parent) {
+        return getHierarchy(parent, true);
+    }
 
-        return removedSubTreeSize.get();
+    /**
+     * Gets the full hierarchy tree starting from the given node. The starting node
+     * can be omitted.
+     * 
+     * @param parent
+     *            the parent node to start from
+     * @param includeParent
+     *            {@code true} to include the parent; {@code false} if not
+     * @return the flattened hierarchy as a stream
+     */
+    private Stream<T> getHierarchy(T parent, boolean includeParent) {
+        return Stream.of(parent)
+                .flatMap(node -> getChildrenStream(node, includeParent));
     }
 
     /**
-     * Splits the given range into queries per tree level.
-     *
-     * @param firstRow
-     *            the first row to fetch
-     * @param lastRow
-     *            the last row to fetch
-     * @return a stream of query data per level
-     * @see #reorderLevelQueryResultsToFlatOrdering(BiConsumer, TreeLevelQuery,
-     *      List)
+     * Gets the stream of direct children for the given node.
+     * 
+     * @param parent
+     *            the parent node
+     * @return the stream of direct children
      */
-    protected Stream<TreeLevelQuery> splitRangeToLevelQueries(
-            final int firstRow, final int lastRow) {
-        return nodes.stream()
-                // filter to parts intersecting with the range
-                .filter(node -> node.startIndex <= lastRow
-                        && firstRow <= node.getEndIndex())
-                // split into queries per level with level based indexing
-                .map(node -> {
-
-                    // calculate how subtrees effect indexing and size
-                    int depth = getDepth(node.parentKey);
-                    List<TreeNode> directSubTrees = nodes.tailSet(node, false)
-                            .stream()
-                            // find subtrees
-                            .filter(subTree -> node.startIndex < subTree
-                                    .getEndIndex()
-                                    && subTree.startIndex < node.getEndIndex())
-                            // filter to direct subtrees
-                            .filter(subTree -> getDepth(
-                                    subTree.parentKey) == (depth + 1))
-                            .collect(Collectors.toList());
-                    // first intersecting index in flat order
-                    AtomicInteger firstIntersectingRowIndex = new AtomicInteger(
-                            Math.max(node.startIndex, firstRow));
-                    // last intersecting index in flat order
-                    final int lastIntersectingRowIndex = Math
-                            .min(node.getEndIndex(), lastRow);
-                    // start index for this level
-                    AtomicInteger start = new AtomicInteger(
-                            firstIntersectingRowIndex.get() - node.startIndex);
-                    // how many nodes should be fetched for this level
-                    AtomicInteger size = new AtomicInteger(
-                            lastIntersectingRowIndex
-                                    - firstIntersectingRowIndex.get() + 1);
-
-                    // reduce subtrees before requested index
-                    directSubTrees.stream().filter(subtree -> subtree
-                            .getEndIndex() < firstIntersectingRowIndex.get())
-                            .forEachOrdered(subtree -> {
-                                start.addAndGet(-1 * (subtree.getEndIndex()
-                                        - subtree.startIndex + 1));
-                            });
-                    // if requested start index is in the middle of a
-                    // subtree, start is after that
-                    List<TreeNode> intersectingSubTrees = new ArrayList<>();
-                    directSubTrees.stream()
-                            .filter(subtree -> subtree.startIndex <= firstIntersectingRowIndex
-                                    .get() && firstIntersectingRowIndex
-                                            .get() <= subtree.getEndIndex())
-                            .findFirst().ifPresent(subtree -> {
-                                int previous = firstIntersectingRowIndex
-                                        .getAndSet(subtree.getEndIndex() + 1);
-                                int delta = previous
-                                        - firstIntersectingRowIndex.get();
-                                start.addAndGet(subtree.startIndex - previous);
-                                size.addAndGet(delta);
-                                intersectingSubTrees.add(subtree);
-                            });
-                    // reduce size of subtrees after first row that intersect
-                    // with requested range
-                    directSubTrees.stream()
-                            .filter(subtree -> firstIntersectingRowIndex
-                                    .get() < subtree.startIndex
-                                    && subtree.endIndex <= lastIntersectingRowIndex)
-                            .forEachOrdered(subtree -> {
-                                // reduce subtree size that is part of the
-                                // requested range from query size
-                                size.addAndGet(
-                                        -1 * (Math.min(subtree.getEndIndex(),
-                                                lastIntersectingRowIndex)
-                                                - subtree.startIndex + 1));
-                                intersectingSubTrees.add(subtree);
-                            });
-                    return new TreeLevelQuery(node, start.get(), size.get(),
-                            depth, firstIntersectingRowIndex.get(),
-                            intersectingSubTrees);
-
-                }).filter(query -> query.size > 0);
+    private Stream<T> getDirectChildren(T parent) {
+        return doFetchDirectChildren(parent, Range.between(0, getDataProvider()
+                .getChildCount(new HierarchicalQuery<>(filter, parent))));
+    }
 
+    /**
+     * The method to recursively fetch the children of the given parent. Used
+     * with {@link Stream#flatMap} to expand a stream of parent nodes into a
+     * flattened hierarchy.
+     * 
+     * @param parent
+     *            the parent node
+     * @return the stream of all children under the parent, including the parent
+     */
+    private Stream<T> getChildrenStream(T parent) {
+        return getChildrenStream(parent, true);
     }
 
     /**
-     * Merges the tree level query results into flat grid ordering.
-     *
-     * @param rangePositionCallback
-     *            the callback to place the results into
-     * @param query
-     *            the query data for the results
-     * @param results
-     *            the results to reorder
-     * @param <T>
-     *            the type of the results
+     * The method to recursively fetch the children of the given parent. Used
+     * with {@link Stream#flatMap} to expand a stream of parent nodes into a
+     * flattened hierarchy.
+     * 
+     * @param parent
+     *            the parent node
+     * @param includeParent
+     *            {@code true} to include the parent in the stream;
+     *            {@code false} if not
+     * @return the stream of all children under the parent
      */
-    protected <T> void reorderLevelQueryResultsToFlatOrdering(
-            BiConsumer<T, Integer> rangePositionCallback, TreeLevelQuery query,
-            List<T> results) {
-        AtomicInteger nextPossibleIndex = new AtomicInteger(
-                query.firstRowIndex);
-        for (T item : results) {
-            // search for any intersecting subtrees and push index if necessary
-            query.subTrees.stream().filter(
-                    subTree -> subTree.startIndex <= nextPossibleIndex.get()
-                            && nextPossibleIndex.get() <= subTree.getEndIndex())
-                    .findAny().ifPresent(intersecting -> {
-                        nextPossibleIndex.addAndGet(intersecting.getEndIndex()
-                                - intersecting.startIndex + 1);
-                        query.subTrees.remove(intersecting);
-                    });
-            rangePositionCallback.accept(item,
-                    nextPossibleIndex.getAndIncrement());
+    private Stream<T> getChildrenStream(T parent, boolean includeParent) {
+        List<T> childList = Collections.emptyList();
+        if (isExpanded(parent)) {
+            childList = getDirectChildren(parent).collect(Collectors.toList());
+            if (childList.isEmpty()) {
+                removeChildren(parent == null ? null
+                        : getDataProvider().getId(parent));
+            } else {
+                childMap.put(parent, new HashSet<>(childList));
+            }
         }
+        return combineParentAndChildStreams(parent,
+                childList.stream().flatMap(this::getChildrenStream),
+                includeParent);
     }
 
     /**
-     * Returns parent index for the row or {@code null}
-     *
-     * @param rowIndex the row index
-     * @return the parent index or {@code null} for top-level items
+     * Helper method for combining a parent and a stream of its children into
+     * one stream. A {@code null} item is never included, and the parent can be
+     * skipped by providing the correct value for {@code includeParent}.
+     * 
+     * @param parent
+     *            the parent node
+     * @param children
+     *            the stream of children
+     * @param includeParent
+     *            {@code true} to include the parent in the stream;
+     *            {@code false} if not
+     * @return the combined stream of parent and its children
      */
-    public Integer getParentIndex(int rowIndex) {
-        return nodes.stream()
-                .filter(treeNode -> treeNode.getParentKey() != null
-                        && treeNode.getStartIndex() <= rowIndex
-                        && treeNode.getEndIndex() >= rowIndex)
-                .min((a, b) -> Math.min(a.getEndIndex() - a.getStartIndex(),
-                        b.getEndIndex() - b.getStartIndex()))
-                .map(treeNode -> treeNode.getStartIndex() - 1)
-                .orElse(null);
+    private Stream<T> combineParentAndChildStreams(T parent, Stream<T> children,
+            boolean includeParent) {
+        boolean parentIncluded = includeParent && parent != null;
+        Stream<T> parentStream = parentIncluded ? Stream.of(parent)
+                : Stream.empty();
+        return Stream.concat(parentStream, children);
     }
 }
index fdda7226fe02be04b74db50643348a1b3d3c1c2d..d32ff1464a81834c44af361cfc0290304a8772a3 100644 (file)
@@ -127,26 +127,28 @@ public class TreeGrid<T> extends Grid<T>
             @Override
             public void setNodeCollapsed(String rowKey, int rowIndex,
                     boolean collapse, boolean userOriginated) {
-                if (collapse) {
-                    if (getDataCommunicator().doCollapse(rowKey, rowIndex)
-                            && userOriginated) {
-                        fireCollapseEvent(getDataCommunicator().getKeyMapper()
-                                .get(rowKey), true);
-                    }
-                } else {
-                    if (getDataCommunicator().doExpand(rowKey, rowIndex,
-                            userOriginated) && userOriginated) {
-                        fireExpandEvent(getDataCommunicator().getKeyMapper()
-                                .get(rowKey), true);
-                    }
+                T item = getDataCommunicator().getKeyMapper().get(rowKey);
+                if (collapse && getDataCommunicator().isExpanded(item)) {
+                    getDataCommunicator().doCollapse(item,
+                            Optional.of(rowIndex));
+                    fireCollapseEvent(
+                            getDataCommunicator().getKeyMapper().get(rowKey),
+                            userOriginated);
+                } else if (!collapse
+                        && !getDataCommunicator().isExpanded(item)) {
+                    getDataCommunicator().doExpand(item, Optional.of(rowIndex));
+                    fireExpandEvent(
+                            getDataCommunicator().getKeyMapper().get(rowKey),
+                            userOriginated);
                 }
             }
         });
+
         registerRpc(new FocusParentRpc() {
             @Override
-            public void focusParent(int rowIndex, int cellIndex) {
-                Integer parentIndex = getDataCommunicator()
-                        .getParentIndex(rowIndex);
+            public void focusParent(String rowKey, int cellIndex) {
+                Integer parentIndex = getDataCommunicator().getParentIndex(
+                        getDataCommunicator().getKeyMapper().get(rowKey));
                 if (parentIndex != null) {
                     getRpcProxy(FocusRpc.class).focusCell(parentIndex,
                             cellIndex);
@@ -341,15 +343,14 @@ public class TreeGrid<T> extends Grid<T>
      *            the items to expand
      */
     public void expand(Collection<T> items) {
-        List<String> expandedKeys = new ArrayList<>();
-        List<T> expandedItems = new ArrayList<>();
-        items.forEach(item -> getDataCommunicator().setPendingExpand(item)
-                .ifPresent(key -> {
-                    expandedKeys.add(key);
-                    expandedItems.add(item);
-                }));
-        getRpcProxy(TreeGridClientRpc.class).setExpanded(expandedKeys);
-        expandedItems.forEach(item -> fireExpandEvent(item, false));
+        HierarchicalDataCommunicator<T> communicator = getDataCommunicator();
+        items.forEach(item -> {
+            if (!communicator.isExpanded(item)
+                    && communicator.hasChildren(item)) {
+                communicator.expand(item);
+                fireExpandEvent(item, false);
+            }
+        });
     }
 
     /**
@@ -373,15 +374,13 @@ public class TreeGrid<T> extends Grid<T>
      *            the collection of items to collapse
      */
     public void collapse(Collection<T> items) {
-        List<String> collapsedKeys = new ArrayList<>();
-        List<T> collapsedItems = new ArrayList<>();
-        items.forEach(item -> getDataCommunicator().collapseItem(item)
-                .ifPresent(key -> {
-                    collapsedKeys.add(key);
-                    collapsedItems.add(item);
-                }));
-        getRpcProxy(TreeGridClientRpc.class).setCollapsed(collapsedKeys);
-        collapsedItems.forEach(item -> fireCollapseEvent(item, false));
+        HierarchicalDataCommunicator<T> communicator = getDataCommunicator();
+        items.forEach(item -> {
+            if (communicator.isExpanded(item)) {
+                communicator.collapse(item);
+                fireCollapseEvent(item, false);
+            }
+        });
     }
 
     @Override
diff --git a/server/src/test/java/com/vaadin/data/provider/HierarchyMapperTest.java b/server/src/test/java/com/vaadin/data/provider/HierarchyMapperTest.java
deleted file mode 100644 (file)
index 1f48578..0000000
+++ /dev/null
@@ -1,147 +0,0 @@
-package com.vaadin.data.provider;
-
-import java.util.Optional;
-
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.vaadin.data.provider.HierarchyMapper.TreeNode;
-
-public class HierarchyMapperTest {
-
-    private HierarchyMapper mapper;
-
-    @Before
-    public void setup() {
-        mapper = new HierarchyMapper();
-    }
-
-    @Test
-    public void testExpandCollapse_rootLevel_indexesUpdated() {
-        mapper.reset(3);
-        verifyRootLevel(0, 2);
-
-        mapper.expand("1", 1, 3);
-
-        verifyTreeTotalSize(6);
-        verifyRootLevel(0, 5);
-        verifyNodeExists("1", 2, 4);
-
-        mapper.expand("0", 0, 3);
-
-        verifyRootLevel(0, 8);
-        verifyNodeExists("0", 1, 3);
-        verifyNodeExists("1", 5, 7);
-        verifyTreeTotalSize(9);
-
-        mapper.collapse("0", 0);
-
-        verifyRootLevel(0, 5);
-        verifyNodeExists("1", 2, 4);
-        verifyTreeTotalSize(6);
-        verifyNoNodeExists("0");
-    }
-
-    @Test
-    public void testExpandCollapse_secondLevelLastNode_indexesUpdated() {
-        mapper.reset(3);
-        verifyRootLevel(0, 2);
-
-        mapper.expand("1", 1, 3);
-
-        verifyTreeTotalSize(6);
-        verifyRootLevel(0, 5);
-        verifyNodeExists("1", 2, 4);
-
-        mapper.expand("0", 0, 3);
-
-        verifyRootLevel(0, 8);
-        verifyNodeExists("0", 1, 3);
-        verifyNodeExists("1", 5, 7);
-        verifyTreeTotalSize(9);
-
-        mapper.expand("2", 3, 3);
-
-        verifyRootLevel(0, 11);
-        verifyNodeExists("0", 1, 6);
-        verifyNodeExists("1", 8, 10);
-        verifyNodeExists("2", 4, 6);
-        verifyTreeTotalSize(12);
-
-        mapper.collapse("2", 3);
-
-        verifyRootLevel(0, 8);
-        verifyNodeExists("0", 1, 3);
-        verifyNodeExists("1", 5, 7);
-        verifyNoNodeExists("2");
-        verifyTreeTotalSize(9);
-
-        mapper.collapse("0", 0);
-
-        verifyRootLevel(0, 5);
-        verifyNodeExists("1", 2, 4);
-        verifyNoNodeExists("0");
-        verifyTreeTotalSize(6);
-    }
-
-    @Test
-    public void testCollapse_multipleLevels_wholeSubtreeDropped() {
-        // expand hierarchy up to 3 level
-        mapper.reset(5);
-        verifyRootLevel(0, 4);
-
-        mapper.expand("1", 2, 2);
-
-        verifyRootLevel(0, 6);
-        verifyNodeExists("1", 3, 4);
-        verifyTreeTotalSize(7);
-
-        mapper.expand("2", 3, 2);
-
-        verifyRootLevel(0, 8);
-        verifyNodeExists("1", 3, 6);
-        verifyNodeExists("2", 4, 5);
-        verifyTreeTotalSize(9);
-
-        mapper.expand("3", 6, 2);
-        verifyRootLevel(0, 10);
-        verifyNodeExists("1", 3, 8);
-        verifyNodeExists("2", 4, 5);
-        verifyNodeExists("3", 7, 8);
-        verifyTreeTotalSize(11);
-
-        // collapse root level node
-        mapper.collapse("1", 2);
-        verifyRootLevel(0, 4);
-        verifyNoNodeExists("1", "2", "3");
-    }
-
-    private void verifyRootLevel(int start, int end) {
-        verifyNode(start, end, mapper.getNodeForKey(null).get());
-    }
-
-    private void verifyNodeExists(String key, int start, int end) {
-        Optional<TreeNode> node = mapper.getNodeForKey(key);
-        Assert.assertTrue("NO NODE FOUND FOR KEY: " + key, node.isPresent());
-        verifyNode(start, end, node.get());
-    }
-
-    private void verifyNoNodeExists(String... nodeKeys) {
-        for (String key : nodeKeys) {
-            Assert.assertFalse("No node should exist for key " + key,
-                    mapper.getNodeForKey(key).isPresent());
-        }
-    }
-
-    private void verifyNode(int start, int end, TreeNode node) {
-        Assert.assertEquals("Invalid start for node " + node, start,
-                node.getStartIndex());
-        Assert.assertEquals("Invalid end for node " + node, end,
-                node.getEndIndex());
-    }
-
-    private void verifyTreeTotalSize(int size) {
-        Assert.assertEquals("Invalid tree size", size, mapper.getTreeSize());
-    }
-}
diff --git a/server/src/test/java/com/vaadin/data/provider/hierarchical/HierarchyMapperWithDataTest.java b/server/src/test/java/com/vaadin/data/provider/hierarchical/HierarchyMapperWithDataTest.java
new file mode 100644 (file)
index 0000000..411bf9d
--- /dev/null
@@ -0,0 +1,264 @@
+package com.vaadin.data.provider.hierarchical;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.vaadin.data.TreeData;
+import com.vaadin.data.provider.HierarchyMapper;
+import com.vaadin.data.provider.TreeDataProvider;
+import com.vaadin.server.SerializablePredicate;
+import com.vaadin.shared.Range;
+import com.vaadin.shared.data.DataCommunicatorClientRpc;
+
+import elemental.json.JsonArray;
+
+public class HierarchyMapperWithDataTest {
+
+    private static final int ROOT_COUNT = 5;
+    private static final int PARENT_COUNT = 4;
+    private static final int LEAF_COUNT = 2;
+
+    private static TreeData<Node> data = new TreeData<>();
+    private TreeDataProvider<Node> provider;
+    private HierarchyMapper<Node, SerializablePredicate<Node>> mapper;
+    private static List<Node> testData;
+    private static List<Node> roots;
+    private int mapSize = ROOT_COUNT;
+
+    @BeforeClass
+    public static void setupData() {
+        testData = generateTestData();
+        roots = testData.stream().filter(item -> item.getParent() == null)
+                .collect(Collectors.toList());
+        data.addItems(roots,
+                parent -> testData.stream().filter(
+                        item -> Objects.equals(item.getParent(), parent))
+                        .collect(Collectors.toList()));
+    }
+
+    @Before
+    public void setup() {
+        provider = new TreeDataProvider<>(data);
+        mapper = new HierarchyMapper<>(provider);
+    }
+
+    @Test
+    public void expandRootNode() {
+        Assert.assertEquals("Map size should be equal to root node count",
+                ROOT_COUNT, mapper.getTreeSize());
+        expand(testData.get(0));
+        Assert.assertEquals("Should be root count + once parent count",
+                ROOT_COUNT + PARENT_COUNT, mapper.getTreeSize());
+        checkMapSize();
+    }
+
+    @Test
+    public void expandAndCollapseLastRootNode() {
+        Assert.assertEquals("Map size should be equal to root node count",
+                ROOT_COUNT, mapper.getTreeSize());
+        expand(roots.get(roots.size() - 1));
+        Assert.assertEquals("Should be root count + once parent count",
+                ROOT_COUNT + PARENT_COUNT, mapper.getTreeSize());
+        checkMapSize();
+        collapse(roots.get(roots.size() - 1));
+        Assert.assertEquals("Map size should be equal to root node count again",
+                ROOT_COUNT, mapper.getTreeSize());
+        checkMapSize();
+    }
+
+    @Test
+    public void expandHiddenNode() {
+        Assert.assertEquals("Map size should be equal to root node count",
+                ROOT_COUNT, mapper.getTreeSize());
+        expand(testData.get(1));
+        Assert.assertEquals(
+                "Map size should not change when expanding a hidden node",
+                ROOT_COUNT, mapper.getTreeSize());
+        checkMapSize();
+        expand(roots.get(0));
+        Assert.assertEquals("Hidden node should now be expanded as well",
+                ROOT_COUNT + PARENT_COUNT + LEAF_COUNT, mapper.getTreeSize());
+        checkMapSize();
+        collapse(roots.get(0));
+        Assert.assertEquals("Map size should be equal to root node count",
+                ROOT_COUNT, mapper.getTreeSize());
+        checkMapSize();
+    }
+
+    @Test
+    public void expandLeafNode() {
+        Assert.assertEquals("Map size should be equal to root node count",
+                ROOT_COUNT, mapper.getTreeSize());
+        expand(testData.get(0));
+        expand(testData.get(1));
+        Assert.assertEquals("Root and parent node expanded",
+                ROOT_COUNT + PARENT_COUNT + LEAF_COUNT, mapper.getTreeSize());
+        checkMapSize();
+        expand(testData.get(2));
+        Assert.assertEquals("Expanding a leaf node should have no effect",
+                ROOT_COUNT + PARENT_COUNT + LEAF_COUNT, mapper.getTreeSize());
+        checkMapSize();
+    }
+
+    @Test
+    public void findParentIndexOfLeaf() {
+        expand(testData.get(0));
+        Assert.assertEquals("Could not find the root node of a parent",
+                Integer.valueOf(0), mapper.getParentIndex(testData.get(1)));
+
+        expand(testData.get(1));
+        Assert.assertEquals("Could not find the parent of a leaf",
+                Integer.valueOf(1), mapper.getParentIndex(testData.get(2)));
+    }
+
+    @Test
+    public void fetchRangeOfRows() {
+        expand(testData.get(0));
+        expand(testData.get(1));
+
+        List<Node> expectedResult = testData.stream()
+                .filter(n -> roots.contains(n)
+                        || n.getParent().equals(testData.get(0))
+                        || n.getParent().equals(testData.get(1)))
+                .collect(Collectors.toList());
+
+        // Range containing deepest level of expanded nodes without their
+        // parents in addition to root nodes at the end.
+        Range range = Range.between(3, mapper.getTreeSize());
+        verifyFetchIsCorrect(expectedResult, range);
+
+        // Only the expanded two nodes, nothing more.
+        range = Range.between(0, 2);
+        verifyFetchIsCorrect(expectedResult, range);
+
+        // Fetch everything
+        range = Range.between(0, mapper.getTreeSize());
+        verifyFetchIsCorrect(expectedResult, range);
+    }
+
+    @Test
+    public void fetchRangeOfRowsWithSorting() {
+        // Expand before sort
+        expand(testData.get(0));
+        expand(testData.get(1));
+
+        // Construct a sorted version of test data with correct filters
+        List<List<Node>> levels = new ArrayList<>();
+        Comparator<Node> comparator = Comparator.comparing(Node::getNumber)
+                .reversed();
+        levels.add(testData.stream().filter(n -> n.getParent() == null)
+                .sorted(comparator).collect(Collectors.toList()));
+        levels.add(
+                testData.stream().filter(n -> n.getParent() == testData.get(0))
+                        .sorted(comparator).collect(Collectors.toList()));
+        levels.add(
+                testData.stream().filter(n -> n.getParent() == testData.get(1))
+                        .sorted(comparator).collect(Collectors.toList()));
+
+        List<Node> expectedResult = levels.get(0).stream().flatMap(root -> {
+            Stream<Node> nextLevel = levels.get(1).stream()
+                    .filter(n -> n.getParent() == root)
+                    .flatMap(node -> Stream.concat(Stream.of(node),
+                            levels.get(2).stream()
+                                    .filter(n -> n.getParent() == node)));
+            return Stream.concat(Stream.of(root), nextLevel);
+        }).collect(Collectors.toList());
+
+        // Apply sorting
+        mapper.setInMemorySorting(comparator::compare);
+
+        // Range containing deepest level of expanded nodes without their
+        // parents in addition to root nodes at the end.
+        Range range = Range.between(8, mapper.getTreeSize());
+        verifyFetchIsCorrect(expectedResult, range);
+
+        // Only the root nodes, nothing more.
+        range = Range.between(0, ROOT_COUNT);
+        verifyFetchIsCorrect(expectedResult, range);
+
+        // Fetch everything
+        range = Range.between(0, mapper.getTreeSize());
+        verifyFetchIsCorrect(expectedResult, range);
+    }
+
+    @Test
+    public void fetchWithFilter() {
+        expand(testData.get(0));
+        Node expandedNode = testData.get(2 + LEAF_COUNT); // Expand second node
+        expand(expandedNode);
+
+        SerializablePredicate<Node> filter = n -> n.getNumber() % 2 == 0;
+        List<Node> expectedResult = testData.stream().filter(filter)
+                .filter(n -> roots.contains(n)
+                        || n.getParent().equals(testData.get(0))
+                        || n.getParent().equals(expandedNode))
+                .collect(Collectors.toList());
+
+        mapper.setFilter(filter);
+
+        // Fetch everything
+        Range range = Range.between(0, mapper.getTreeSize());
+        verifyFetchIsCorrect(expectedResult, range);
+    }
+
+    private void expand(Node node) {
+        insertRows(mapper.doExpand(node, mapper.getIndexOf(node)));
+    }
+
+    private void collapse(Node node) {
+        removeRows(mapper.doCollapse(node, mapper.getIndexOf(node)));
+    }
+
+    private void verifyFetchIsCorrect(List<Node> expectedResult, Range range) {
+        List<Node> collect = mapper.fetchItems(range)
+                .collect(Collectors.toList());
+        for (int i = 0; i < range.length(); ++i) {
+            Assert.assertEquals("Unexpected fetch results.",
+                    expectedResult.get(i + range.getStart()), collect.get(i));
+        }
+    }
+
+    private static List<Node> generateTestData() {
+        List<Node> nodes = new ArrayList<>();
+        for (int i = 0; i < ROOT_COUNT; ++i) {
+            Node root = new Node();
+            nodes.add(root);
+            for (int j = 0; j < PARENT_COUNT; ++j) {
+                Node parent = new Node(root);
+                nodes.add(parent);
+                for (int k = 0; k < LEAF_COUNT; ++k) {
+                    nodes.add(new Node(parent));
+                }
+            }
+        }
+        return nodes;
+    }
+
+    private void checkMapSize() {
+        Assert.assertEquals("Map size not properly updated",
+                mapper.getTreeSize(), mapSize);
+    }
+
+    public void removeRows(Range range) {
+        Assert.assertTrue("Index not in range",
+                0 <= range.getStart() && range.getStart() < mapSize);
+        Assert.assertTrue("Removing more items than in map",
+                range.getEnd() <= mapSize);
+        mapSize -= range.length();
+    }
+
+    public void insertRows(Range range) {
+        Assert.assertTrue("Index not in range",
+                0 <= range.getStart() && range.getStart() <= mapSize);
+        mapSize += range.length();
+    }
+}
diff --git a/server/src/test/java/com/vaadin/data/provider/hierarchical/Node.java b/server/src/test/java/com/vaadin/data/provider/hierarchical/Node.java
new file mode 100644 (file)
index 0000000..c15f713
--- /dev/null
@@ -0,0 +1,33 @@
+package com.vaadin.data.provider.hierarchical;
+
+import java.io.Serializable;
+
+public class Node implements Serializable {
+
+    private static int counter = 0;
+
+    private final Node parent;
+    private final int number;
+
+    public Node() {
+        this(null);
+    }
+
+    public Node(Node parent) {
+        this.parent = parent;
+        this.number = counter++;
+    }
+
+    public Node getParent() {
+        return parent;
+    }
+
+    public int getNumber() {
+        return number;
+    }
+
+    public String toString() {
+        return number + (parent != null ? " [parent: " + parent.toString() + "]"
+                : "");
+    }
+}
index 856b3369db484af34d7e487881984c4783a43d9b..333fc84d357b19f1a92b948703f24a11d5786793 100644 (file)
@@ -28,10 +28,10 @@ public interface FocusParentRpc extends ServerRpc {
     /**
      * Focuses cell in the row parent
      *
-     * @param rowIndex
-     *            the row index
+     * @param rowKey
+     *            the row key
      * @param cellIndex
      *            the cell index
      */
-    void focusParent(int rowIndex, int cellIndex);
+    void focusParent(String rowKey, int cellIndex);
 }
index d630e3629268ccf2dbd2c66baf2648c751863e48..53d1cb98e740ccecaea5f0f2e120f025f82e5641 100644 (file)
@@ -14,8 +14,7 @@ import com.vaadin.ui.TreeGrid;
 
 public class TreeGridChangingHierarchy extends AbstractTestUI {
 
-    private static class TestDataProvider
-            extends TreeDataProvider<String> {
+    private static class TestDataProvider extends TreeDataProvider<String> {
 
         private TreeData<String> treeData;
 
@@ -64,24 +63,34 @@ public class TreeGridChangingHierarchy extends AbstractTestUI {
         Button btn3 = new Button("remove a/a");
         btn3.addClickListener(event -> {
             data.removeItem("a/a");
+            // Notify the DataProvider of the item removal
+            grid.getDataProvider().refreshAll();
         });
         Button btn4 = new Button("remove children of a/a");
         btn4.addClickListener(event -> {
             data.removeItem("a/a/a");
             data.removeItem("a/a/c");
+            // Notify the DataProvider of the item removal
+            grid.getDataProvider().refreshAll();
         });
         Button btn5 = new Button("remove a");
         btn5.addClickListener(event -> {
             data.removeItem("a");
+            // Notify the DataProvider of the item removal
+            grid.getDataProvider().refreshAll();
         });
         Button btn6 = new Button("remove children of a");
         btn6.addClickListener(event -> {
             data.removeItem("a/a");
             data.removeItem("a/b");
+            // Notify the DataProvider of the item removal
+            grid.getDataProvider().refreshAll();
         });
         Button btn7 = new Button("remove children of a/a/a");
         btn7.addClickListener(event -> {
             data.removeItem("a/a/a/a");
+            // Notify the DataProvider of the item removal
+            grid.getDataProvider().refreshAll();
         });
 
         addComponents(grid, btn, btn2, btn3, btn4, btn5, btn6, btn7);
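
Every handler above follows the same pattern: mutate the TreeData directly, then call refreshAll() so the data provider, and through it the HierarchicalDataCommunicator, notices the change. A stripped-down sketch with illustrative items:

import com.vaadin.data.TreeData;
import com.vaadin.data.provider.TreeDataProvider;
import com.vaadin.ui.TreeGrid;

public class RemoveAndRefreshSketch {

    public static void main(String[] args) {
        TreeData<String> data = new TreeData<>();
        data.addItem(null, "a");
        data.addItem("a", "a/a");

        TreeGrid<String> grid = new TreeGrid<>();
        TreeDataProvider<String> provider = new TreeDataProvider<>(data);
        grid.setDataProvider(provider);

        // Mutating TreeData alone does not notify the grid; refreshAll()
        // pushes the new hierarchy to the HierarchicalDataCommunicator.
        data.removeItem("a/a");
        provider.refreshAll();
    }
}
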
index 89273af661797f10fa3c83558d62a7fe4ab80407..eaac894523e5cc84d6a84dbc8f98e78df4ddfd6d 100644 (file)
@@ -52,9 +52,6 @@ public class TreeGridChangingHierarchyTest extends SingleBrowserTest {
         grid.expandWithClick(1);
         grid.collapseWithClick(0);
         removeAABtn.click();
-        // Item removed from hierarchy. when encountering less children than
-        // expected, should reset:
-        grid.expandWithClick(0);
         // expand "a" after the reset:
         grid.expandWithClick(0);
         // "a/a" should be removed from a's children:
@@ -92,9 +89,6 @@ public class TreeGridChangingHierarchyTest extends SingleBrowserTest {
         grid.expandWithClick(2);
         removeChildrenOfAAABtn.click();
         grid.collapseWithClick(1);
-        // reset should get triggered here
-        grid.expandWithClick(1);
-        grid.expandWithClick(0);
         grid.expandWithClick(1);
         assertEquals("a/a/a", grid.getCell(2, 0).getText());
         assertFalse(grid.hasExpandToggle(2, 0));
@@ -106,7 +100,9 @@ public class TreeGridChangingHierarchyTest extends SingleBrowserTest {
         grid.expandWithClick(0);
         grid.getCell(1, 0).click();
         removeChildrenOfABtn.click();
-        grid.collapseWithClick(0);
+        // HierarchyMapper will notice the removal of the children of a, and
+        // mark it as collapsed.
+        // grid.collapseWithClick(0);
         grid.getCell(1, 0).click();
         assertTrue(grid.getRow(1).isSelected());
     }