From f76cc830efa7f4823e8eed272de8b5ca2cfebfc3 Mon Sep 17 00:00:00 2001 From: Teemu Suo-Anttila Date: Thu, 15 Jun 2017 15:44:56 +0300 Subject: [PATCH] Rewrite HierarchyMapper to consistently handle changes Fixes #9449 Fixes #9490 Fixes #9448 Fixes #9465 --- .../client/ui/treegrid/TreeGridConnector.java | 2 +- .../data/provider/DataCommunicator.java | 75 +- .../HierarchicalDataCommunicator.java | 515 +++--------- .../vaadin/data/provider/HierarchyMapper.java | 784 +++++++++--------- .../src/main/java/com/vaadin/ui/TreeGrid.java | 65 +- .../data/provider/HierarchyMapperTest.java | 147 ---- .../HierarchyMapperWithDataTest.java | 264 ++++++ .../data/provider/hierarchical/Node.java | 33 + .../shared/ui/treegrid/FocusParentRpc.java | 6 +- .../treegrid/TreeGridChangingHierarchy.java | 13 +- .../TreeGridChangingHierarchyTest.java | 10 +- 11 files changed, 923 insertions(+), 991 deletions(-) delete mode 100644 server/src/test/java/com/vaadin/data/provider/HierarchyMapperTest.java create mode 100644 server/src/test/java/com/vaadin/data/provider/hierarchical/HierarchyMapperWithDataTest.java create mode 100644 server/src/test/java/com/vaadin/data/provider/hierarchical/Node.java diff --git a/client/src/main/java/com/vaadin/client/ui/treegrid/TreeGridConnector.java b/client/src/main/java/com/vaadin/client/ui/treegrid/TreeGridConnector.java index e92747dde9..48ba0562fd 100644 --- a/client/src/main/java/com/vaadin/client/ui/treegrid/TreeGridConnector.java +++ b/client/src/main/java/com/vaadin/client/ui/treegrid/TreeGridConnector.java @@ -376,7 +376,7 @@ public class TreeGridConnector extends GridConnector { // navigate up int columnIndex = cell.getColumnIndex(); getRpcProxy(FocusParentRpc.class).focusParent( - cell.getRowIndex(), columnIndex); + getRowKey(cell.getRow()), columnIndex); } else if (isCollapseAllowed(rowDescription)) { setCollapsed(cell.getRowIndex(), true); } diff --git a/server/src/main/java/com/vaadin/data/provider/DataCommunicator.java b/server/src/main/java/com/vaadin/data/provider/DataCommunicator.java index 7427e7a8c5..230eab8d40 100644 --- a/server/src/main/java/com/vaadin/data/provider/DataCommunicator.java +++ b/server/src/main/java/com/vaadin/data/provider/DataCommunicator.java @@ -25,7 +25,6 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; @@ -193,12 +192,13 @@ public class DataCommunicator extends AbstractExtension { private final Collection> generators = new LinkedHashSet<>(); private final ActiveDataHandler handler = new ActiveDataHandler(); - /** Empty default data provider */ + /** Empty default data provider. */ protected DataProvider dataProvider = new CallbackDataProvider<>( q -> Stream.empty(), q -> 0); private final DataKeyMapper keyMapper; - protected boolean reset = false; + /** Boolean for pending hard reset. 
*/ + protected boolean reset = true; private final Set updatedData = new HashSet<>(); private int minPushSize = 40; private Range pushRows = Range.withLength(0, minPushSize); @@ -323,9 +323,7 @@ public class DataCommunicator extends AbstractExtension { } if (initial || reset) { - @SuppressWarnings({ "rawtypes", "unchecked" }) - int dataProviderSize = getDataProvider().size(new Query(filter)); - rpc.reset(dataProviderSize); + rpc.reset(getDataProviderSize()); } Range requestedRows = getPushRows(); @@ -334,11 +332,7 @@ public class DataCommunicator extends AbstractExtension { int offset = requestedRows.getStart(); int limit = requestedRows.length(); - @SuppressWarnings({ "rawtypes", "unchecked" }) - List rowsToPush = (List) getDataProvider() - .fetch(new Query(offset, limit, backEndSorting, - inMemorySorting, filter)) - .collect(Collectors.toList()); + List rowsToPush = fetchItemsWithRange(offset, limit); if (!initial && !reset && rowsToPush.size() == 0) { triggerReset = true; @@ -361,6 +355,13 @@ public class DataCommunicator extends AbstractExtension { updatedData.clear(); } + @SuppressWarnings({ "rawtypes", "unchecked" }) + protected List fetchItemsWithRange(int offset, int limit) { + return (List) getDataProvider().fetch(new Query(offset, limit, + backEndSorting, inMemorySorting, filter)) + .collect(Collectors.toList()); + } + /** * Adds a data generator to this data communicator. Data generators can be * used to insert custom data to the rows sent to the client. If the data @@ -480,15 +481,15 @@ public class DataCommunicator extends AbstractExtension { } /** - * Informs the DataProvider that the collection has changed. + * Method for internal reset from a change in the component, requiring a + * full data update. */ public void reset() { - if (reset) { - return; + // Only needed if a full reset is not pending. + if (!reset) { + // Soft reset through client-side re-request. + getClientRpc().reset(getDataProviderSize()); } - - reset = true; - markAsDirty(); } /** @@ -641,7 +642,7 @@ public class DataCommunicator extends AbstractExtension { if (isAttached()) { attachDataProviderListener(); } - reset(); + hardReset(); return filter -> { if (this.dataProvider != dataProvider) { @@ -650,12 +651,27 @@ public class DataCommunicator extends AbstractExtension { } if (!Objects.equals(this.filter, filter)) { - this.filter = filter; + setFilter(filter); reset(); } }; } + /** + * Sets the filter for this DataCommunicator. This method is used by user + * through the consumer method from {@link #setDataProvider} and should not + * be called elsewhere. + * + * @param filter + * the filter + * + * @param + * the filter type + */ + protected void setFilter(F filter) { + this.filter = filter; + } + /** * Set minimum size of data which will be sent to the client when data * source is set. @@ -693,6 +709,17 @@ public class DataCommunicator extends AbstractExtension { return minPushSize; } + /** + * Getter method for finding the size of DataProvider. Can be overridden by + * a subclass that uses a specific type of DataProvider and/or query. 
+ * + * @return the size of data provider with current filter + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + protected int getDataProviderSize() { + return getDataProvider().size(new Query(getFilter())); + } + @Override protected DataCommunicatorState getState(boolean markAsDirty) { return (DataCommunicatorState) super.getState(markAsDirty); @@ -713,12 +740,20 @@ public class DataCommunicator extends AbstractExtension { generators.forEach(g -> g.refreshData(item)); refresh(item); } else { - reset(); + hardReset(); } }); }); } + private void hardReset() { + if (reset) { + return; + } + reset = true; + markAsDirty(); + } + private void detachDataProviderListener() { if (dataProviderUpdateRegistration != null) { dataProviderUpdateRegistration.remove(); diff --git a/server/src/main/java/com/vaadin/data/provider/HierarchicalDataCommunicator.java b/server/src/main/java/com/vaadin/data/provider/HierarchicalDataCommunicator.java index 715538112e..3e9eb01921 100644 --- a/server/src/main/java/com/vaadin/data/provider/HierarchicalDataCommunicator.java +++ b/server/src/main/java/com/vaadin/data/provider/HierarchicalDataCommunicator.java @@ -15,31 +15,19 @@ */ package com.vaadin.data.provider; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; +import java.util.Comparator; import java.util.List; import java.util.Objects; import java.util.Optional; -import java.util.Set; -import java.util.function.BiConsumer; -import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.Stream; import com.vaadin.data.TreeData; -import com.vaadin.data.provider.HierarchyMapper.TreeLevelQuery; -import com.vaadin.data.provider.HierarchyMapper.TreeNode; import com.vaadin.server.SerializableConsumer; import com.vaadin.shared.Range; -import com.vaadin.shared.data.HierarchicalDataCommunicatorConstants; import com.vaadin.shared.extension.datacommunicator.HierarchicalDataCommunicatorState; import com.vaadin.ui.ItemCollapseAllowedProvider; -import elemental.json.Json; -import elemental.json.JsonArray; -import elemental.json.JsonObject; - /** * Data communicator that handles requesting hierarchical data from * {@link HierarchicalDataProvider} and sending it to client side. @@ -51,35 +39,20 @@ import elemental.json.JsonObject; */ public class HierarchicalDataCommunicator extends DataCommunicator { - private static final Logger LOGGER = Logger - .getLogger(HierarchicalDataCommunicator.class.getName()); - - /** - * The amount of root level nodes to fetch and push to the client. - */ - private static final int INITIAL_FETCH_SIZE = 100; - - private HierarchyMapper mapper = new HierarchyMapper(); - - private Set rowKeysPendingExpand = new HashSet<>(); + private HierarchyMapper mapper; /** * Collapse allowed provider used to allow/disallow collapsing nodes. */ private ItemCollapseAllowedProvider itemCollapseAllowedProvider = t -> true; - /** - * The captured client side cache size. - */ - private int latestCacheSize = INITIAL_FETCH_SIZE; - /** * Construct a new hierarchical data communicator backed by a * {@link TreeDataProvider}. 
*/ public HierarchicalDataCommunicator() { super(); - dataProvider = new TreeDataProvider<>(new TreeData<>()); + setDataProvider(new TreeDataProvider(new TreeData<>()), null); } @Override @@ -93,225 +66,11 @@ public class HierarchicalDataCommunicator extends DataCommunicator { } @Override - protected void sendDataToClient(boolean initial) { - // on purpose do not call super - if (getDataProvider() == null) { - return; - } - - if (initial || reset) { - loadInitialData(); - } else { - loadRequestedRows(); - } - - if (!getUpdatedData().isEmpty()) { - JsonArray dataArray = Json.createArray(); - int i = 0; - for (T data : getUpdatedData()) { - dataArray.set(i++, createDataObject(data, -1)); - } - getClientRpc().updateData(dataArray); - getUpdatedData().clear(); - } - } - - private void loadInitialData() { - int rootSize = doSizeQuery(null); - mapper.reset(rootSize); - - if (rootSize != 0) { - Range initialRange = getInitialRowsToPush(rootSize); - assert !initialRange - .isEmpty() : "Initial range should never be empty."; - Stream rootItems = doFetchQuery(initialRange.getStart(), - initialRange.length(), null); - - // for now just fetching data for the root level as everything is - // collapsed by default - List items = rootItems.collect(Collectors.toList()); - List dataObjects = items.stream() - .map(item -> createDataObject(item, 0)) - .collect(Collectors.toList()); - - getClientRpc().reset(rootSize); - sendData(0, dataObjects); - getActiveDataHandler().addActiveData(items.stream()); - getActiveDataHandler().cleanUp(items.stream()); - } else { - getClientRpc().reset(0); - } - - setPushRows(Range.withLength(0, 0)); - // any updated data is ignored at this point - getUpdatedData().clear(); - reset = false; - } - - private void loadRequestedRows() { - final Range requestedRows = getPushRows(); - if (!requestedRows.isEmpty()) { - doPushRows(requestedRows, 0); - } - - setPushRows(Range.withLength(0, 0)); - } - - /** - * Attempts to push the requested range of rows to the client. Will trigger - * a reset if the data provider is unable to provide the requested number of - * items. 
- * - * @param requestedRows - * the range of rows to push - * @param insertRowsCount - * number of rows to insert, beginning at the start index of - * {@code requestedRows}, 0 to not insert any rows - * @return {@code true} if the range was successfully pushed to the client, - * {@code false} if the push was unsuccessful and a reset was - * triggered - */ - private boolean doPushRows(final Range requestedRows, int insertRowsCount) { - Stream levelQueries = mapper.splitRangeToLevelQueries( - requestedRows.getStart(), requestedRows.getEnd() - 1); - - JsonObject[] dataObjects = new JsonObject[requestedRows.length()]; - BiConsumer rowDataMapper = (object, - index) -> dataObjects[index - - requestedRows.getStart()] = object; - List fetchedItems = new ArrayList<>(dataObjects.length); - - levelQueries.forEach(query -> { - List results = doFetchQuery(query.startIndex, query.size, - getKeyMapper().get(query.node.getParentKey())) - .collect(Collectors.toList()); - - fetchedItems.addAll(results); - List rowData = results.stream() - .map(item -> createDataObject(item, query.depth)) - .collect(Collectors.toList()); - mapper.reorderLevelQueryResultsToFlatOrdering(rowDataMapper, query, - rowData); - }); - - if (hasNullItems(dataObjects, requestedRows)) { - reset(); - return false; - } - - if (insertRowsCount > 0) { - getClientRpc().insertRows(requestedRows.getStart(), - insertRowsCount); - } - - sendData(requestedRows.getStart(), Arrays.asList(dataObjects)); - getActiveDataHandler().addActiveData(fetchedItems.stream()); - getActiveDataHandler().cleanUp(fetchedItems.stream()); - return true; - } - - private boolean hasNullItems(JsonObject[] dataObjects, - Range requestedRange) { - for (JsonObject object : dataObjects) { - if (object == null) { - return true; - } - } - return false; - } - - private JsonObject createDataObject(T item, int depth) { - JsonObject dataObject = getDataObject(item); - - JsonObject hierarchyData = Json.createObject(); - if (depth != -1) { - hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_DEPTH, - depth); - } - - boolean isLeaf = !getDataProvider().hasChildren(item); - if (isLeaf) { - hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_LEAF, - true); - } else { - String key = getKeyMapper().key(item); - hierarchyData.put( - HierarchicalDataCommunicatorConstants.ROW_COLLAPSED, - mapper.isCollapsed(key)); - hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_LEAF, - false); - hierarchyData.put( - HierarchicalDataCommunicatorConstants.ROW_COLLAPSE_ALLOWED, - itemCollapseAllowedProvider.test(item)); - } - - // add hierarchy information to row as metadata - dataObject.put( - HierarchicalDataCommunicatorConstants.ROW_HIERARCHY_DESCRIPTION, - hierarchyData); - - return dataObject; - } - - private void sendData(int startIndex, List dataObjects) { - JsonArray dataArray = Json.createArray(); - int i = 0; - for (JsonObject dataObject : dataObjects) { - dataArray.set(i++, dataObject); - } - - getClientRpc().setData(startIndex, dataArray); - } - - /** - * Returns the range of rows to push on initial response. 
- * - * @param rootLevelSize - * the amount of rows on the root level - * @return the range of rows to push initially - */ - private Range getInitialRowsToPush(int rootLevelSize) { - // TODO optimize initial level to avoid unnecessary requests - return Range.between(0, Math.min(rootLevelSize, latestCacheSize)); - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - private Stream doFetchQuery(int start, int length, T parentItem) { - return getDataProvider() - .fetch(new HierarchicalQuery(start, length, getBackEndSorting(), - getInMemorySorting(), getFilter(), parentItem)); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - private int doSizeQuery(T parentItem) { - return getDataProvider() - .getChildCount(new HierarchicalQuery(getFilter(), parentItem)); - } - - @Override - protected void onRequestRows(int firstRowIndex, int numberOfRows, - int firstCachedRowIndex, int cacheSize) { - super.onRequestRows(firstRowIndex, numberOfRows, firstCachedRowIndex, - cacheSize); - } - - @Override - protected void onDropRows(JsonArray keys) { - for (int i = 0; i < keys.length(); i++) { - // cannot drop keys of expanded rows, parents of expanded rows or - // rows that are pending expand - String itemKey = keys.getString(i); - if (!mapper.isKeyStored(itemKey) - && !rowKeysPendingExpand.contains(itemKey)) { - getActiveDataHandler().dropActiveData(itemKey); - } - } - } - - @Override - protected void dropAllData() { - super.dropAllData(); - rowKeysPendingExpand.clear(); + protected List fetchItemsWithRange(int offset, int limit) { + // Instead of adding logic to this class, delegate request to the + // separate object handling hierarchies. + return mapper.fetchItems(Range.withLength(offset, limit)) + .collect(Collectors.toList()); } @Override @@ -335,7 +94,25 @@ public class HierarchicalDataCommunicator extends DataCommunicator { */ public SerializableConsumer setDataProvider( HierarchicalDataProvider dataProvider, F initialFilter) { - return super.setDataProvider(dataProvider, initialFilter); + SerializableConsumer consumer = super.setDataProvider(dataProvider, + initialFilter); + + // Remove old mapper + if (mapper != null) { + removeDataGenerator(mapper); + } + mapper = new HierarchyMapper<>(dataProvider); + + // Set up mapper for requests + mapper.setBackEndSorting(getBackEndSorting()); + mapper.setInMemorySorting(getInMemorySorting()); + mapper.setFilter(getFilter()); + mapper.setItemCollapseAllowedProvider(getItemCollapseAllowedProvider()); + + // Provide hierarchy data to json + addDataGenerator(mapper); + + return consumer; } /** @@ -357,7 +134,9 @@ public class HierarchicalDataCommunicator extends DataCommunicator { public SerializableConsumer setDataProvider( DataProvider dataProvider, F initialFilter) { if (dataProvider instanceof HierarchicalDataProvider) { - return super.setDataProvider(dataProvider, initialFilter); + return setDataProvider( + (HierarchicalDataProvider) dataProvider, + initialFilter); } throw new IllegalArgumentException( "Only " + HierarchicalDataProvider.class.getName() @@ -365,153 +144,100 @@ public class HierarchicalDataCommunicator extends DataCommunicator { } /** - * Collapses given row, removing all its subtrees. Calling this method will + * Collapses given item, removing all its subtrees. Calling this method will * have no effect if the row is already collapsed. 
* - * @param collapsedRowKey - * the key of the row, not {@code null} - * @param collapsedRowIndex - * the index of row to collapse - * @return {@code true} if the row was collapsed, {@code false} otherwise + * @param item + * the item to collapse */ - public boolean doCollapse(String collapsedRowKey, int collapsedRowIndex) { - if (collapsedRowIndex < 0 | collapsedRowIndex >= mapper.getTreeSize()) { - throw new IllegalArgumentException("Invalid row index " - + collapsedRowIndex + " when tree grid size of " - + mapper.getTreeSize()); - } - Objects.requireNonNull(collapsedRowKey, "Row key cannot be null"); - T collapsedItem = getKeyMapper().get(collapsedRowKey); - Objects.requireNonNull(collapsedItem, - "Cannot find item for given key " + collapsedItem); - - if (mapper.isCollapsed(collapsedRowKey)) { - return false; + public void collapse(T item) { + if (mapper.isExpanded(item)) { + doCollapse(item, mapper.getIndexOf(item)); } - int collapsedSubTreeSize = mapper.collapse(collapsedRowKey, - collapsedRowIndex); - getClientRpc().removeRows(collapsedRowIndex + 1, collapsedSubTreeSize); - - // FIXME seems like a slight overkill to do this just for refreshing - // expanded status - refresh(collapsedItem); - return true; } /** - * Expands the given row. Calling this method will have no effect if the row - * is already expanded. + * Collapses given item, removing all its subtrees. Calling this method will + * have no effect if the row is already collapsed. The index is provided by + * the client-side or calculated from a full data request. + * + * @see #collapse(Object) * - * @param expandedRowKey - * the key of the row, not {@code null} - * @param expandedRowIndex - * the index of the row to expand - * @param userOriginated - * whether this expand was originated from the server or client - * @return {@code true} if the row was expanded, {@code false} otherwise + * @param item + * the item to collapse + * @param index + * the index of the item */ - public boolean doExpand(String expandedRowKey, final int expandedRowIndex, - boolean userOriginated) { - if (!userOriginated && !rowKeysPendingExpand.contains(expandedRowKey)) { - return false; - } - if (expandedRowIndex < 0 | expandedRowIndex >= mapper.getTreeSize()) { - throw new IllegalArgumentException("Invalid row index " - + expandedRowIndex + " when tree grid size of " - + mapper.getTreeSize()); - } - Objects.requireNonNull(expandedRowKey, "Row key cannot be null"); - final T expandedItem = getKeyMapper().get(expandedRowKey); - Objects.requireNonNull(expandedItem, - "Cannot find item for given key " + expandedRowKey); - - int expandedNodeSize = doSizeQuery(expandedItem); - if (expandedNodeSize == 0) { - reset(); - return false; - } - - if (!mapper.isCollapsed(expandedRowKey)) { - return false; - } - expandedNodeSize = mapper.expand(expandedRowKey, expandedRowIndex, - expandedNodeSize); - rowKeysPendingExpand.remove(expandedRowKey); - - boolean success = doPushRows( - Range.withLength(expandedRowIndex + 1, - Math.min(expandedNodeSize, latestCacheSize)), - expandedNodeSize); - - if (success) { - // FIXME seems like a slight overkill to do this just for refreshing - // expanded status - refresh(expandedItem); - return true; + public void doCollapse(T item, Optional index) { + if (mapper.isExpanded(item)) { + Range removedRows = mapper.doCollapse(item, index); + if (!reset && !removedRows.isEmpty()) { + getClientRpc().removeRows(removedRows.getStart(), + removedRows.length()); + } + refresh(item); } - return false; } /** - * Set an item as pending 
expansion. - *
- * Calling this method reserves a communication key for the item that is - * guaranteed to not be invalidated until the item is expanded. Has no - * effect and returns an empty optional if the given item is already - * expanded or has no children. + * Expands the given item. Calling this method will have no effect if the + * row is already expanded. * * @param item - * the item to set as pending expansion - * @return an optional of the communication key used for the item, empty if - * the item cannot be expanded + * the item to expand */ - public Optional setPendingExpand(T item) { - Objects.requireNonNull(item, "Item cannot be null"); - if (getKeyMapper().has(item) - && !mapper.isCollapsed(getKeyMapper().key(item))) { - // item is already expanded - return Optional.empty(); + public void expand(T item) { + if (!mapper.isExpanded(item) && mapper.hasChildren(item)) { + doExpand(item, mapper.getIndexOf(item)); } - if (!getDataProvider().hasChildren(item)) { - // ignore item with no children - return Optional.empty(); - } - String key = getKeyMapper().key(item); - rowKeysPendingExpand.add(key); - return Optional.of(key); } /** - * Collapse an item. - *
- * This method will either collapse an item directly, or remove its pending - * expand status. If the item is not expanded or pending expansion, calling - * this method has no effect. + * Expands the given item at given index. Calling this method will have no + * effect if the row is already expanded. The index is provided by the + * client-side or calculated from a full data request. + * + * @see #expand(Object) * * @param item - * the item to collapse - * @return an optional of the communication key used for the item, empty if - * the item cannot be collapsed + * the item to expand + * @param index + * the index of the item */ - public Optional collapseItem(T item) { - Objects.requireNonNull(item, "Item cannot be null"); - if (!getKeyMapper().has(item)) { - // keymapper should always have items that are expanded or pending - // expand - return Optional.empty(); - } - String nodeKey = getKeyMapper().key(item); - Optional node = mapper.getNodeForKey(nodeKey); - if (node.isPresent()) { - rowKeysPendingExpand.remove(nodeKey); - doCollapse(nodeKey, node.get().getStartIndex() - 1); - return Optional.of(nodeKey); - } - if (rowKeysPendingExpand.contains(nodeKey)) { - rowKeysPendingExpand.remove(nodeKey); - return Optional.of(nodeKey); + public void doExpand(T item, Optional index) { + if (!mapper.isExpanded(item)) { + Range addedRows = mapper.doExpand(item, index); + if (!reset && !addedRows.isEmpty()) { + int start = addedRows.getStart(); + getClientRpc().insertRows(start, addedRows.length()); + Stream children = mapper.fetchItems(item, + Range.withLength(0, addedRows.length())); + pushData(start, children.collect(Collectors.toList())); + } + refresh(item); } - return Optional.empty(); + } + + /** + * Returns whether given item has children. + * + * @param item + * the item to test + * @return {@code true} if item has children; {@code false} if not + */ + public boolean hasChildren(T item) { + return mapper.hasChildren(item); + } + + /** + * Returns whether given item is expanded. + * + * @param item + * the item to test + * @return {@code true} if item is expanded; {@code false} if not + */ + public boolean isExpanded(T item) { + return mapper.isExpanded(item); } /** @@ -529,19 +255,21 @@ public class HierarchicalDataCommunicator extends DataCommunicator { ItemCollapseAllowedProvider provider) { Objects.requireNonNull(provider, "Provider can't be null"); itemCollapseAllowedProvider = provider; + // Update hierarchy mapper + mapper.setItemCollapseAllowedProvider(provider); getActiveDataHandler().getActiveData().values().forEach(this::refresh); } /** - * Returns parent index for the row or {@code null} + * Returns parent index for the row or {@code null}. 
* - * @param rowIndex - * the row index + * @param item + * the item to find the parent of * @return the parent index or {@code null} for top-level items */ - public Integer getParentIndex(int rowIndex) { - return mapper.getParentIndex(rowIndex); + public Integer getParentIndex(T item) { + return mapper.getParentIndex(item); } /** @@ -552,4 +280,33 @@ public class HierarchicalDataCommunicator extends DataCommunicator { public ItemCollapseAllowedProvider getItemCollapseAllowedProvider() { return itemCollapseAllowedProvider; } + + @Override + protected int getDataProviderSize() { + return mapper.getTreeSize(); + } + + @Override + public void setBackEndSorting(List sortOrder) { + if (mapper != null) { + mapper.setBackEndSorting(sortOrder); + } + super.setBackEndSorting(sortOrder); + } + + @Override + public void setInMemorySorting(Comparator comparator) { + if (mapper != null) { + mapper.setInMemorySorting(comparator); + } + super.setInMemorySorting(comparator); + } + + @Override + protected void setFilter(F filter) { + if (mapper != null) { + mapper.setFilter(filter); + } + super.setFilter(filter); + } } diff --git a/server/src/main/java/com/vaadin/data/provider/HierarchyMapper.java b/server/src/main/java/com/vaadin/data/provider/HierarchyMapper.java index ab080aa450..ebed464fde 100644 --- a/server/src/main/java/com/vaadin/data/provider/HierarchyMapper.java +++ b/server/src/main/java/com/vaadin/data/provider/HierarchyMapper.java @@ -15,22 +15,27 @@ */ package com.vaadin.data.provider; -import java.io.Serializable; -import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.Set; -import java.util.TreeSet; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BiConsumer; -import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.Stream; +import com.vaadin.shared.Range; +import com.vaadin.shared.data.HierarchicalDataCommunicatorConstants; +import com.vaadin.ui.ItemCollapseAllowedProvider; + +import elemental.json.Json; +import elemental.json.JsonObject; + /** * Mapper for hierarchical data. *
@@ -43,484 +48,465 @@ import java.util.stream.Stream; * * @author Vaadin Ltd * @since 8.1 + * + * @param + * the data type + * @param + * the filter type */ -class HierarchyMapper implements Serializable { +public class HierarchyMapper implements DataGenerator { + + // childMap is only used for finding parents of items and clean up on + // removing children of expanded nodes. + private Map> childMap = new HashMap<>(); - private static final Logger LOGGER = Logger - .getLogger(HierarchyMapper.class.getName()); + private final HierarchicalDataProvider provider; + private F filter; + private List backEndSorting; + private Comparator inMemorySorting; + private ItemCollapseAllowedProvider itemCollapseAllowedProvider = t -> true; + + private Set expandedItemIds = new HashSet<>(); /** - * A POJO that represents a query data for a certain tree level. + * Constructs a new HierarchyMapper. + * + * @param provider + * the hierarchical data provider for this mapper */ - static class TreeLevelQuery { // not serializable since not stored - /** - * The tree node that the query is for. Only used for fetching parent - * key. - */ - final TreeNode node; - /** The start index of the query, from 0 to level's size - 1. */ - final int startIndex; - /** The number of rows to fetch. s */ - final int size; - /** The depth of this node. */ - final int depth; - /** The first row index in grid, including all the nodes. */ - final int firstRowIndex; - /** The direct subtrees for the node that effect the indexing. */ - final List subTrees; - - TreeLevelQuery(TreeNode node, int startIndex, int size, int depth, - int firstRowIndex, List subTrees) { - this.node = node; - this.startIndex = startIndex; - this.size = size; - this.depth = depth; - this.firstRowIndex = firstRowIndex; - this.subTrees = subTrees; - } + public HierarchyMapper(HierarchicalDataProvider provider) { + this.provider = provider; } /** - * A level in the tree, either the root level or an expanded subtree level. - *
- * Comparable based on the {@link #startIndex}, which is flat from 0 to data - * size - 1. + * Returns the size of the currently expanded hierarchy. + * + * @return the amount of available data */ - static class TreeNode implements Serializable, Comparable { - - /** The key for the expanded item that this is a subtree of. */ - private final String parentKey; - /** The first index on this level. */ - private int startIndex; - /** The last index on this level, INCLUDING subtrees. */ - private int endIndex; - - TreeNode(String parentKey, int startIndex, int size) { - this.parentKey = parentKey; - this.startIndex = startIndex; - endIndex = startIndex + size - 1; - } + public int getTreeSize() { + return (int) getHierarchy(null).count(); + } - TreeNode(int startIndex) { - parentKey = "INVALID"; - this.startIndex = startIndex; - } + /** + * Finds the index of the parent of the item in given target index. + * + * @param item + * the item to get the parent of + * @return the parent index + * + * @throws IllegalArgumentException + * if given target index is not found + */ + public Integer getParentIndex(T item) throws IllegalArgumentException { + // TODO: This can be optimised. + List flatHierarchy = getHierarchy(null).collect(Collectors.toList()); + return flatHierarchy.indexOf(getParentOfItem(item)); + } - int getStartIndex() { - return startIndex; + /** + * Returns whether the given item is expanded. + * + * @param item + * the item to test + * @return {@code true} if item is expanded; {@code false} if not + */ + public boolean isExpanded(T item) { + if (item == null) { + // Root nodes are always visible. + return true; } + return expandedItemIds.contains(getDataProvider().getId(item)); + } - int getEndIndex() { - return endIndex; + /** + * Expands the given item. + * + * @param item + * the item to expand + * @param position + * the index of item + * @return range of rows added by expanding the item + */ + public Range doExpand(T item, Optional position) { + Range rows = Range.withLength(0, 0); + if (!isExpanded(item) && hasChildren(item)) { + Object id = getDataProvider().getId(item); + expandedItemIds.add(id); + if (position.isPresent()) { + rows = Range.withLength(position.get() + 1, + (int) getHierarchy(item, false).count()); + } } + return rows; + } - String getParentKey() { - return parentKey; + /** + * Collapses the given item. 
+ * + * @param item + * the item to expand + * @param position + * the index of item + * + * @return range of rows removed by collapsing the item + */ + public Range doCollapse(T item, Optional position) { + Range removedRows = Range.withLength(0, 0); + if (isExpanded(item)) { + Object id = getDataProvider().getId(item); + if (position.isPresent()) { + long childCount = getHierarchy(item, false).count(); + removedRows = Range.withLength(position.get() + 1, + (int) childCount); + } + expandedItemIds.remove(id); } + return removedRows; + } - private void push(int offset) { - startIndex += offset; - endIndex += offset; - } + @Override + public void generateData(T item, JsonObject jsonObject) { + JsonObject hierarchyData = Json.createObject(); - private void pushEnd(int offset) { - endIndex += offset; + int depth = getDepth(item); + if (depth >= 0) { + hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_DEPTH, + depth); } - @Override - public int compareTo(TreeNode other) { - return Integer.valueOf(startIndex).compareTo(other.startIndex); + boolean isLeaf = !getDataProvider().hasChildren(item); + if (isLeaf) { + hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_LEAF, + true); + } else { + hierarchyData.put( + HierarchicalDataCommunicatorConstants.ROW_COLLAPSED, + !isExpanded(item)); + hierarchyData.put(HierarchicalDataCommunicatorConstants.ROW_LEAF, + false); + hierarchyData.put( + HierarchicalDataCommunicatorConstants.ROW_COLLAPSE_ALLOWED, + getItemCollapseAllowedProvider().test(item)); } - @Override - public String toString() { - return "TreeNode [parent=" + parentKey + ", start=" + startIndex - + ", end=" + getEndIndex() + "]"; - } + // add hierarchy information to row as metadata + jsonObject.put( + HierarchicalDataCommunicatorConstants.ROW_HIERARCHY_DESCRIPTION, + hierarchyData); + } + /** + * Gets the current item collapse allowed provider. + * + * @return the item collapse allowed provider + */ + public ItemCollapseAllowedProvider getItemCollapseAllowedProvider() { + return itemCollapseAllowedProvider; } - /** The expanded nodes in the tree. */ - private final TreeSet nodes = new TreeSet<>(); + /** + * Sets the current item collapse allowed provider. + * + * @param itemCollapseAllowedProvider + * the item collapse allowed provider + */ + public void setItemCollapseAllowedProvider( + ItemCollapseAllowedProvider itemCollapseAllowedProvider) { + this.itemCollapseAllowedProvider = itemCollapseAllowedProvider; + } /** - * Map of collapsed subtrees. The keys of this map are the collapsed - * subtrees parent keys and values are the {@code TreeSet}s of the subtree's - * expanded {@code TreeNode}s. + * Gets the current in-memory sorting. + * + * @return the in-memory sorting */ - private final Map> collapsedNodes = new HashMap<>(); + public Comparator getInMemorySorting() { + return inMemorySorting; + } /** - * Resets the tree, sets given the root level size. - * - * @param rootLevelSize - * the number of items in the root level + * Sets the current in-memory sorting. This will cause the hierarchy to be + * constructed again. + * + * @param inMemorySorting + * the in-memory sorting */ - public void reset(int rootLevelSize) { - collapsedNodes.clear(); - nodes.clear(); - nodes.add(new TreeNode(null, 0, rootLevelSize)); + public void setInMemorySorting(Comparator inMemorySorting) { + this.inMemorySorting = inMemorySorting; } /** - * Returns the complete size of the tree, including all expanded subtrees. - * - * @return the size of the tree + * Gets the current back-end sorting. 
+ * + * @return the back-end sorting */ - public int getTreeSize() { - TreeNode rootNode = getNodeForKey(null) - .orElse(new TreeNode(null, 0, 0)); - return rootNode.endIndex + 1; + public List getBackEndSorting() { + return backEndSorting; } /** - * Returns whether the node with the given is collapsed or not. - * - * @param itemKey - * the key of node to check - * @return {@code true} if collapsed, {@code false} if expanded + * Sets the current back-end sorting. This will cause the hierarchy to be + * constructed again. + * + * @param backEndSorting + * the back-end sorting */ - public boolean isCollapsed(String itemKey) { - return !getNodeForKey(itemKey).isPresent(); + public void setBackEndSorting(List backEndSorting) { + this.backEndSorting = backEndSorting; } /** - * Return whether the given item key is still being used in this mapper. - * - * @param itemKey - * the item key to look for - * @return {@code true} if the item key is still used, {@code false} - * otherwise + * Gets the current filter. + * + * @return the filter */ - public boolean isKeyStored(String itemKey) { - if (getNodeForKey(itemKey).isPresent()) { - return true; - } - // Is the key used in a collapsed subtree? - for (Entry> entry : collapsedNodes.entrySet()) { - if (entry.getKey() != null && entry.getKey().equals(itemKey)) { - return true; - } - for (TreeNode subTreeNode : entry.getValue()) { - if (subTreeNode.getParentKey() != null - && subTreeNode.getParentKey().equals(itemKey)) { - return true; - } - } - } - return false; + public F getFilter() { + return filter; } /** - * Return the depth of expanded node's subtree. - *
- * The root node depth is 0. - * - * @param expandedNodeKey - * the item key of the expanded node - * @return the depth of the expanded node - * @throws IllegalArgumentException - * if the node was not expanded + * Sets the current filter. This will cause the hierarchy to be constructed + * again. + * + * @param filter + * the filter */ - protected int getDepth(String expandedNodeKey) { - Optional node = getNodeForKey(expandedNodeKey); - if (!node.isPresent()) { - throw new IllegalArgumentException("No node with given key " - + expandedNodeKey + " was expanded."); - } - TreeNode treeNode = node.get(); - AtomicInteger start = new AtomicInteger(treeNode.startIndex); - AtomicInteger end = new AtomicInteger(treeNode.getEndIndex()); - AtomicInteger depth = new AtomicInteger(); - nodes.headSet(treeNode, false).descendingSet().forEach(higherNode -> { - if (higherNode.startIndex < start.get() - && higherNode.getEndIndex() >= end.get()) { - start.set(higherNode.startIndex); - depth.incrementAndGet(); - } - }); + public void setFilter(Object filter) { + this.filter = (F) filter; + } - return depth.get(); + /** + * Gets the {@code HierarchicalDataProvider} for this + * {@code HierarchyMapper}. + * + * @return the hierarchical data provider + */ + public HierarchicalDataProvider getDataProvider() { + return provider; } /** - * Returns the tree node for the given expanded item key, or an empty - * optional if the item was not expanded. - * - * @param expandedNodeKey - * the key of the item - * @return the tree node for the expanded item, or an empty optional if not - * expanded + * Returns whether given item has children. + * + * @param item + * the node to test + * @return {@code true} if node has children; {@code false} if not */ - protected Optional getNodeForKey(String expandedNodeKey) { - return nodes.stream() - .filter(node -> Objects.equals(node.parentKey, expandedNodeKey)) - .findAny(); + public boolean hasChildren(T item) { + return getDataProvider().hasChildren(item); } + /* Fetch methods. These are used to calculate what to request. */ + /** - * Expands the node in the given index and with the given key. - * - * @param expandedRowKey - * the key of the expanded item - * @param expandedRowIndex - * the index of the expanded item - * @param expandedNodeSize - * the size of the subtree of the expanded node, used if - * previously unknown - * @throws IllegalStateException - * if the node was expanded already - * @return the actual size of the expand + * Gets a stream of items in the form of a flattened hierarchy from the + * back-end and filter the wanted results from the list. 
+ * + * @param range + * the requested item range + * @return the stream of items */ - protected int expand(String expandedRowKey, int expandedRowIndex, - int expandedNodeSize) { - if (expandedNodeSize < 1) { - throw new IllegalArgumentException( - "The expanded node's size cannot be less than 1, was " - + expandedNodeSize); - } - TreeNode newNode; - TreeSet subTree = null; - if (collapsedNodes.containsKey(expandedRowKey)) { - subTree = collapsedNodes.remove(expandedRowKey); - newNode = subTree.first(); - int offset = expandedRowIndex - newNode.getStartIndex() + 1; - subTree.forEach(node -> node.push(offset)); - expandedNodeSize = newNode.getEndIndex() - newNode.getStartIndex() - + 1; - } else { - newNode = new TreeNode(expandedRowKey, expandedRowIndex + 1, - expandedNodeSize); - } + public Stream fetchItems(Range range) { + return getHierarchy(null).skip(range.getStart()).limit(range.length()); + } - boolean added = nodes.add(newNode); - if (!added) { - throw new IllegalStateException("Node in index " + expandedRowIndex - + " was expanded already."); - } + /** + * Gets a stream of children for the given item in the form of a flattened + * hierarchy from the back-end and filter the wanted results from the list. + * + * @param parent + * the parent item for the fetch + * @param range + * the requested item range + * @return the stream of items + */ + public Stream fetchItems(T parent, Range range) { + return getHierarchy(parent, false).skip(range.getStart()) + .limit(range.length()); + } - // push end indexes for parent nodes - final int expandSize = expandedNodeSize; - List updated = nodes.headSet(newNode, false).stream() - .filter(node -> node.getEndIndex() >= expandedRowIndex) - .collect(Collectors.toList()); - nodes.removeAll(updated); - updated.stream().forEach(node -> node.pushEnd(expandSize)); - nodes.addAll(updated); + /* Methods for providing information on the hierarchy. */ - // push start and end indexes for later nodes - updated = nodes.tailSet(newNode, false).stream() - .collect(Collectors.toList()); - nodes.removeAll(updated); - updated.stream().forEach(node -> node.push(expandSize)); - nodes.addAll(updated); + /** + * Generic method for finding direct children of a given parent, limited by + * given range. + * + * @param parent + * the parent + * @param range + * the range of direct children to return + * @return the requested children of the given parent + */ + @SuppressWarnings({ "rawtypes", "unchecked" }) + private Stream doFetchDirectChildren(T parent, Range range) { + return getDataProvider().fetchChildren(new HierarchicalQuery( + range.getStart(), range.length(), getBackEndSorting(), + getInMemorySorting(), getFilter(), parent)); + } - if (subTree != null) { - nodes.addAll(subTree); + private int getDepth(T item) { + int depth = -1; + while (item != null) { + item = getParentOfItem(item); + ++depth; } + return depth; + } - return expandSize; + private T getParentOfItem(T item) { + Objects.requireNonNull(item, "Can not find the parent of null"); + Object itemId = getDataProvider().getId(item); + for (Entry> entry : childMap.entrySet()) { + if (entry.getValue().stream().map(getDataProvider()::getId) + .anyMatch(id -> id.equals(itemId))) { + return entry.getKey(); + } + } + return null; } /** - * Collapses the node in the given index. 
- * - * @param key - * the key of the collapsed item - * @param collapsedRowIndex - * the index of the collapsed item - * @return the size of the complete subtree that was collapsed - * @throws IllegalStateException - * if the node was not collapsed, or if the given key is not the - * same as it was when the node has been expanded + * Removes all children of an item identified by a given id. Items removed + * by this method as well as the original item are all marked to be + * collapsed. + * + * @param id + * the item id */ - protected int collapse(String key, int collapsedRowIndex) { - Objects.requireNonNull(key, - "The key for the item to collapse cannot be null."); - TreeNode collapsedNode = nodes - .ceiling(new TreeNode(collapsedRowIndex + 1)); - if (collapsedNode == null - || collapsedNode.startIndex != collapsedRowIndex + 1) { - throw new IllegalStateException( - "Could not find expanded node for index " - + collapsedRowIndex + ", node was not collapsed"); - } - if (!Objects.equals(key, collapsedNode.parentKey)) { - throw new IllegalStateException("The expected parent key " + key - + " is different for the collapsed node " + collapsedNode); + private void removeChildren(Object id) { + // Clean up removed nodes from child map + Iterator>> iterator = childMap.entrySet().iterator(); + Set invalidatedChildren = new HashSet<>(); + while (iterator.hasNext()) { + Entry> entry = iterator.next(); + T key = entry.getKey(); + if (key != null && getDataProvider().getId(key).equals(id)) { + invalidatedChildren.addAll(entry.getValue()); + iterator.remove(); + } } + expandedItemIds.remove(id); + invalidatedChildren.stream().map(getDataProvider()::getId) + .forEach(this::removeChildren); + } - Set subTreeNodes = nodes - .tailSet(collapsedNode).stream().filter(node -> collapsedNode - .getEndIndex() >= node.getEndIndex()) - .collect(Collectors.toSet()); - collapsedNodes.put(collapsedNode.getParentKey(), new TreeSet<>(subTreeNodes)); - - // remove complete subtree - AtomicInteger removedSubTreeSize = new AtomicInteger( - collapsedNode.getEndIndex() - collapsedNode.startIndex + 1); - nodes.tailSet(collapsedNode, false).removeIf( - node -> node.startIndex <= collapsedNode.getEndIndex()); - - final int offset = -1 * removedSubTreeSize.get(); - // adjust parent end indexes - List updated = nodes.headSet(collapsedNode, false).stream() - .filter(node -> node.getEndIndex() >= collapsedRowIndex) - .collect(Collectors.toList()); - nodes.removeAll(updated); - updated.stream().forEach(node -> node.pushEnd(offset)); - nodes.addAll(updated); + /** + * Finds the current index of given object. This is based on a search in + * flattened version of the hierarchy. + * + * @param target + * the target object to find + * @return optional index of given object + */ + public Optional getIndexOf(T target) { + if (target == null) { + return Optional.empty(); + } - // adjust start and end indexes for latter nodes - updated = nodes.tailSet(collapsedNode, false).stream() + final List collect = getHierarchy(null).map(provider::getId) .collect(Collectors.toList()); - nodes.removeAll(updated); - updated.stream().forEach(node -> node.push(offset)); - nodes.addAll(updated); + int index = collect.indexOf(getDataProvider().getId(target)); + return Optional.ofNullable(index < 0 ? null : index); + } - nodes.remove(collapsedNode); + /** + * Gets the full hierarchy tree starting from given node. 
+ * + * @param parent + * the parent node to start from + * @return the flattened hierarchy as a stream + */ + private Stream getHierarchy(T parent) { + return getHierarchy(parent, true); + } - return removedSubTreeSize.get(); + /** + * Getst hte full hierarchy tree starting from given node. The starting node + * can be omitted. + * + * @param parent + * the parent node to start from + * @param includeParent + * {@code true} to include the parent; {@code false} if not + * @return the flattened hierarchy as a stream + */ + private Stream getHierarchy(T parent, boolean includeParent) { + return Stream.of(parent) + .flatMap(node -> getChildrenStream(node, includeParent)); } /** - * Splits the given range into queries per tree level. - * - * @param firstRow - * the first row to fetch - * @param lastRow - * the last row to fetch - * @return a stream of query data per level - * @see #reorderLevelQueryResultsToFlatOrdering(BiConsumer, TreeLevelQuery, - * List) + * Gets the stream of direct children for given node. + * + * @param parent + * the parent node + * @return the stream of direct children */ - protected Stream splitRangeToLevelQueries( - final int firstRow, final int lastRow) { - return nodes.stream() - // filter to parts intersecting with the range - .filter(node -> node.startIndex <= lastRow - && firstRow <= node.getEndIndex()) - // split into queries per level with level based indexing - .map(node -> { - - // calculate how subtrees effect indexing and size - int depth = getDepth(node.parentKey); - List directSubTrees = nodes.tailSet(node, false) - .stream() - // find subtrees - .filter(subTree -> node.startIndex < subTree - .getEndIndex() - && subTree.startIndex < node.getEndIndex()) - // filter to direct subtrees - .filter(subTree -> getDepth( - subTree.parentKey) == (depth + 1)) - .collect(Collectors.toList()); - // first intersecting index in flat order - AtomicInteger firstIntersectingRowIndex = new AtomicInteger( - Math.max(node.startIndex, firstRow)); - // last intersecting index in flat order - final int lastIntersectingRowIndex = Math - .min(node.getEndIndex(), lastRow); - // start index for this level - AtomicInteger start = new AtomicInteger( - firstIntersectingRowIndex.get() - node.startIndex); - // how many nodes should be fetched for this level - AtomicInteger size = new AtomicInteger( - lastIntersectingRowIndex - - firstIntersectingRowIndex.get() + 1); - - // reduce subtrees before requested index - directSubTrees.stream().filter(subtree -> subtree - .getEndIndex() < firstIntersectingRowIndex.get()) - .forEachOrdered(subtree -> { - start.addAndGet(-1 * (subtree.getEndIndex() - - subtree.startIndex + 1)); - }); - // if requested start index is in the middle of a - // subtree, start is after that - List intersectingSubTrees = new ArrayList<>(); - directSubTrees.stream() - .filter(subtree -> subtree.startIndex <= firstIntersectingRowIndex - .get() && firstIntersectingRowIndex - .get() <= subtree.getEndIndex()) - .findFirst().ifPresent(subtree -> { - int previous = firstIntersectingRowIndex - .getAndSet(subtree.getEndIndex() + 1); - int delta = previous - - firstIntersectingRowIndex.get(); - start.addAndGet(subtree.startIndex - previous); - size.addAndGet(delta); - intersectingSubTrees.add(subtree); - }); - // reduce size of subtrees after first row that intersect - // with requested range - directSubTrees.stream() - .filter(subtree -> firstIntersectingRowIndex - .get() < subtree.startIndex - && subtree.endIndex <= lastIntersectingRowIndex) - .forEachOrdered(subtree -> { - // 
reduce subtree size that is part of the - // requested range from query size - size.addAndGet( - -1 * (Math.min(subtree.getEndIndex(), - lastIntersectingRowIndex) - - subtree.startIndex + 1)); - intersectingSubTrees.add(subtree); - }); - return new TreeLevelQuery(node, start.get(), size.get(), - depth, firstIntersectingRowIndex.get(), - intersectingSubTrees); - - }).filter(query -> query.size > 0); + private Stream getDirectChildren(T parent) { + return doFetchDirectChildren(parent, Range.between(0, getDataProvider() + .getChildCount(new HierarchicalQuery<>(filter, parent)))); + } + /** + * The method to recursively fetch the children of given parent. Used with + * {@link Stream#flatMap} to expand a stream of parent nodes into a + * flattened hierarchy. + * + * @param parent + * the parent node + * @return the stream of all children under the parent, includes the parent + */ + private Stream getChildrenStream(T parent) { + return getChildrenStream(parent, true); } /** - * Merges the tree level query results into flat grid ordering. - * - * @param rangePositionCallback - * the callback to place the results into - * @param query - * the query data for the results - * @param results - * the results to reorder - * @param - * the type of the results + * The method to recursively fetch the children of given parent. Used with + * {@link Stream#flatMap} to expand a stream of parent nodes into a + * flattened hierarchy. + * + * @param parent + * the parent node + * @param includeParent + * {@code true} to include the parent in the stream; + * {@code false} if not + * @return the stream of all children under the parent */ - protected void reorderLevelQueryResultsToFlatOrdering( - BiConsumer rangePositionCallback, TreeLevelQuery query, - List results) { - AtomicInteger nextPossibleIndex = new AtomicInteger( - query.firstRowIndex); - for (T item : results) { - // search for any intersecting subtrees and push index if necessary - query.subTrees.stream().filter( - subTree -> subTree.startIndex <= nextPossibleIndex.get() - && nextPossibleIndex.get() <= subTree.getEndIndex()) - .findAny().ifPresent(intersecting -> { - nextPossibleIndex.addAndGet(intersecting.getEndIndex() - - intersecting.startIndex + 1); - query.subTrees.remove(intersecting); - }); - rangePositionCallback.accept(item, - nextPossibleIndex.getAndIncrement()); + private Stream getChildrenStream(T parent, boolean includeParent) { + List childList = Collections.emptyList(); + if (isExpanded(parent)) { + childList = getDirectChildren(parent).collect(Collectors.toList()); + if (childList.isEmpty()) { + removeChildren(parent == null ? null + : getDataProvider().getId(parent)); + } else { + childMap.put(parent, new HashSet<>(childList)); + } } + return combineParentAndChildStreams(parent, + childList.stream().flatMap(this::getChildrenStream), + includeParent); } /** - * Returns parent index for the row or {@code null} - * - * @param rowIndex the row index - * @return the parent index or {@code null} for top-level items + * Helper method for combining parent and a stream of children into one + * stream. {@code null} item is never included, and parent can be skipped by + * providing the correct value for {@code includeParent}. 
+ * + * @param parent + * the parent node + * @param children + * the stream of children + * @param includeParent + * {@code true} to include the parent in the stream; + * {@code false} if not + * @return the combined stream of parent and its children */ - public Integer getParentIndex(int rowIndex) { - return nodes.stream() - .filter(treeNode -> treeNode.getParentKey() != null - && treeNode.getStartIndex() <= rowIndex - && treeNode.getEndIndex() >= rowIndex) - .min((a, b) -> Math.min(a.getEndIndex() - a.getStartIndex(), - b.getEndIndex() - b.getStartIndex())) - .map(treeNode -> treeNode.getStartIndex() - 1) - .orElse(null); + private Stream combineParentAndChildStreams(T parent, Stream children, + boolean includeParent) { + boolean parentIncluded = includeParent && parent != null; + Stream parentStream = parentIncluded ? Stream.of(parent) + : Stream.empty(); + return Stream.concat(parentStream, children); } } diff --git a/server/src/main/java/com/vaadin/ui/TreeGrid.java b/server/src/main/java/com/vaadin/ui/TreeGrid.java index fdda7226fe..d32ff1464a 100644 --- a/server/src/main/java/com/vaadin/ui/TreeGrid.java +++ b/server/src/main/java/com/vaadin/ui/TreeGrid.java @@ -127,26 +127,28 @@ public class TreeGrid extends Grid @Override public void setNodeCollapsed(String rowKey, int rowIndex, boolean collapse, boolean userOriginated) { - if (collapse) { - if (getDataCommunicator().doCollapse(rowKey, rowIndex) - && userOriginated) { - fireCollapseEvent(getDataCommunicator().getKeyMapper() - .get(rowKey), true); - } - } else { - if (getDataCommunicator().doExpand(rowKey, rowIndex, - userOriginated) && userOriginated) { - fireExpandEvent(getDataCommunicator().getKeyMapper() - .get(rowKey), true); - } + T item = getDataCommunicator().getKeyMapper().get(rowKey); + if (collapse && getDataCommunicator().isExpanded(item)) { + getDataCommunicator().doCollapse(item, + Optional.of(rowIndex)); + fireCollapseEvent( + getDataCommunicator().getKeyMapper().get(rowKey), + userOriginated); + } else if (!collapse + && !getDataCommunicator().isExpanded(item)) { + getDataCommunicator().doExpand(item, Optional.of(rowIndex)); + fireExpandEvent( + getDataCommunicator().getKeyMapper().get(rowKey), + userOriginated); } } }); + registerRpc(new FocusParentRpc() { @Override - public void focusParent(int rowIndex, int cellIndex) { - Integer parentIndex = getDataCommunicator() - .getParentIndex(rowIndex); + public void focusParent(String rowKey, int cellIndex) { + Integer parentIndex = getDataCommunicator().getParentIndex( + getDataCommunicator().getKeyMapper().get(rowKey)); if (parentIndex != null) { getRpcProxy(FocusRpc.class).focusCell(parentIndex, cellIndex); @@ -341,15 +343,14 @@ public class TreeGrid extends Grid * the items to expand */ public void expand(Collection items) { - List expandedKeys = new ArrayList<>(); - List expandedItems = new ArrayList<>(); - items.forEach(item -> getDataCommunicator().setPendingExpand(item) - .ifPresent(key -> { - expandedKeys.add(key); - expandedItems.add(item); - })); - getRpcProxy(TreeGridClientRpc.class).setExpanded(expandedKeys); - expandedItems.forEach(item -> fireExpandEvent(item, false)); + HierarchicalDataCommunicator communicator = getDataCommunicator(); + items.forEach(item -> { + if (!communicator.isExpanded(item) + && communicator.hasChildren(item)) { + communicator.expand(item); + fireExpandEvent(item, false); + } + }); } /** @@ -373,15 +374,13 @@ public class TreeGrid extends Grid * the collection of items to collapse */ public void collapse(Collection items) { - List 
collapsedKeys = new ArrayList<>(); - List collapsedItems = new ArrayList<>(); - items.forEach(item -> getDataCommunicator().collapseItem(item) - .ifPresent(key -> { - collapsedKeys.add(key); - collapsedItems.add(item); - })); - getRpcProxy(TreeGridClientRpc.class).setCollapsed(collapsedKeys); - collapsedItems.forEach(item -> fireCollapseEvent(item, false)); + HierarchicalDataCommunicator communicator = getDataCommunicator(); + items.forEach(item -> { + if (communicator.isExpanded(item)) { + communicator.collapse(item); + fireCollapseEvent(item, false); + } + }); } @Override diff --git a/server/src/test/java/com/vaadin/data/provider/HierarchyMapperTest.java b/server/src/test/java/com/vaadin/data/provider/HierarchyMapperTest.java deleted file mode 100644 index 1f48578eea..0000000000 --- a/server/src/test/java/com/vaadin/data/provider/HierarchyMapperTest.java +++ /dev/null @@ -1,147 +0,0 @@ -package com.vaadin.data.provider; - -import java.util.Optional; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import com.vaadin.data.provider.HierarchyMapper.TreeNode; - -public class HierarchyMapperTest { - - private HierarchyMapper mapper; - - @Before - public void setup() { - mapper = new HierarchyMapper(); - } - - @Test - public void testExpandCollapse_rootLevel_indexesUpdated() { - mapper.reset(3); - verifyRootLevel(0, 2); - - mapper.expand("1", 1, 3); - - verifyTreeTotalSize(6); - verifyRootLevel(0, 5); - verifyNodeExists("1", 2, 4); - - mapper.expand("0", 0, 3); - - verifyRootLevel(0, 8); - verifyNodeExists("0", 1, 3); - verifyNodeExists("1", 5, 7); - verifyTreeTotalSize(9); - - mapper.collapse("0", 0); - - verifyRootLevel(0, 5); - verifyNodeExists("1", 2, 4); - verifyTreeTotalSize(6); - verifyNoNodeExists("0"); - } - - @Test - public void testExpandCollapse_secondLevelLastNode_indexesUpdated() { - mapper.reset(3); - verifyRootLevel(0, 2); - - mapper.expand("1", 1, 3); - - verifyTreeTotalSize(6); - verifyRootLevel(0, 5); - verifyNodeExists("1", 2, 4); - - mapper.expand("0", 0, 3); - - verifyRootLevel(0, 8); - verifyNodeExists("0", 1, 3); - verifyNodeExists("1", 5, 7); - verifyTreeTotalSize(9); - - mapper.expand("2", 3, 3); - - verifyRootLevel(0, 11); - verifyNodeExists("0", 1, 6); - verifyNodeExists("1", 8, 10); - verifyNodeExists("2", 4, 6); - verifyTreeTotalSize(12); - - mapper.collapse("2", 3); - - verifyRootLevel(0, 8); - verifyNodeExists("0", 1, 3); - verifyNodeExists("1", 5, 7); - verifyNoNodeExists("2"); - verifyTreeTotalSize(9); - - mapper.collapse("0", 0); - - verifyRootLevel(0, 5); - verifyNodeExists("1", 2, 4); - verifyNoNodeExists("0"); - verifyTreeTotalSize(6); - } - - @Test - public void testCollapse_multipleLevels_wholeSubtreeDropped() { - // expand hierarchy up to 3 level - mapper.reset(5); - verifyRootLevel(0, 4); - - mapper.expand("1", 2, 2); - - verifyRootLevel(0, 6); - verifyNodeExists("1", 3, 4); - verifyTreeTotalSize(7); - - mapper.expand("2", 3, 2); - - verifyRootLevel(0, 8); - verifyNodeExists("1", 3, 6); - verifyNodeExists("2", 4, 5); - verifyTreeTotalSize(9); - - mapper.expand("3", 6, 2); - verifyRootLevel(0, 10); - verifyNodeExists("1", 3, 8); - verifyNodeExists("2", 4, 5); - verifyNodeExists("3", 7, 8); - verifyTreeTotalSize(11); - - // collapse root level node - mapper.collapse("1", 2); - verifyRootLevel(0, 4); - verifyNoNodeExists("1", "2", "3"); - } - - private void verifyRootLevel(int start, int end) { - verifyNode(start, end, mapper.getNodeForKey(null).get()); - } - - private void verifyNodeExists(String key, int start, int end) { 
-        Optional<TreeNode> node = mapper.getNodeForKey(key);
-        Assert.assertTrue("NO NODE FOUND FOR KEY: " + key, node.isPresent());
-        verifyNode(start, end, node.get());
-    }
-
-    private void verifyNoNodeExists(String... nodeKeys) {
-        for (String key : nodeKeys) {
-            Assert.assertFalse("No node should exist for key " + key,
-                    mapper.getNodeForKey(key).isPresent());
-        }
-    }
-
-    private void verifyNode(int start, int end, TreeNode node) {
-        Assert.assertEquals("Invalid start for node " + node, start,
-                node.getStartIndex());
-        Assert.assertEquals("Invalid end for node " + node, end,
-                node.getEndIndex());
-    }
-
-    private void verifyTreeTotalSize(int size) {
-        Assert.assertEquals("Invalid tree size", size, mapper.getTreeSize());
-    }
-}
diff --git a/server/src/test/java/com/vaadin/data/provider/hierarchical/HierarchyMapperWithDataTest.java b/server/src/test/java/com/vaadin/data/provider/hierarchical/HierarchyMapperWithDataTest.java
new file mode 100644
index 0000000000..411bf9de0d
--- /dev/null
+++ b/server/src/test/java/com/vaadin/data/provider/hierarchical/HierarchyMapperWithDataTest.java
@@ -0,0 +1,264 @@
+package com.vaadin.data.provider.hierarchical;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.vaadin.data.TreeData;
+import com.vaadin.data.provider.HierarchyMapper;
+import com.vaadin.data.provider.TreeDataProvider;
+import com.vaadin.server.SerializablePredicate;
+import com.vaadin.shared.Range;
+import com.vaadin.shared.data.DataCommunicatorClientRpc;
+
+import elemental.json.JsonArray;
+
+public class HierarchyMapperWithDataTest {
+
+    private static final int ROOT_COUNT = 5;
+    private static final int PARENT_COUNT = 4;
+    private static final int LEAF_COUNT = 2;
+
+    private static TreeData<Node> data = new TreeData<>();
+    private TreeDataProvider<Node> provider;
+    private HierarchyMapper<Node, SerializablePredicate<Node>> mapper;
+    private static List<Node> testData;
+    private static List<Node> roots;
+    private int mapSize = ROOT_COUNT;
+
+    @BeforeClass
+    public static void setupData() {
+        testData = generateTestData();
+        roots = testData.stream().filter(item -> item.getParent() == null)
+                .collect(Collectors.toList());
+        data.addItems(roots,
+                parent -> testData.stream().filter(
+                        item -> Objects.equals(item.getParent(), parent))
+                        .collect(Collectors.toList()));
+    }
+
+    @Before
+    public void setup() {
+        provider = new TreeDataProvider<>(data);
+        mapper = new HierarchyMapper<>(provider);
+    }
+
+    @Test
+    public void expandRootNode() {
+        Assert.assertEquals("Map size should be equal to root node count",
+                ROOT_COUNT, mapper.getTreeSize());
+        expand(testData.get(0));
+        Assert.assertEquals("Should be root count + once parent count",
+                ROOT_COUNT + PARENT_COUNT, mapper.getTreeSize());
+        checkMapSize();
+    }
+
+    @Test
+    public void expandAndCollapseLastRootNode() {
+        Assert.assertEquals("Map size should be equal to root node count",
+                ROOT_COUNT, mapper.getTreeSize());
+        expand(roots.get(roots.size() - 1));
+        Assert.assertEquals("Should be root count + once parent count",
+                ROOT_COUNT + PARENT_COUNT, mapper.getTreeSize());
+        checkMapSize();
+        collapse(roots.get(roots.size() - 1));
+        Assert.assertEquals("Map size should be equal to root node count again",
+                ROOT_COUNT, mapper.getTreeSize());
+        checkMapSize();
+    }
+
+    @Test
+    public void expandHiddenNode() {
+        Assert.assertEquals("Map size should be equal to root node count",
node count", + ROOT_COUNT, mapper.getTreeSize()); + expand(testData.get(1)); + Assert.assertEquals( + "Map size should not change when expanding a hidden node", + ROOT_COUNT, mapper.getTreeSize()); + checkMapSize(); + expand(roots.get(0)); + Assert.assertEquals("Hidden node should now be expanded as well", + ROOT_COUNT + PARENT_COUNT + LEAF_COUNT, mapper.getTreeSize()); + checkMapSize(); + collapse(roots.get(0)); + Assert.assertEquals("Map size should be equal to root node count", + ROOT_COUNT, mapper.getTreeSize()); + checkMapSize(); + } + + @Test + public void expandLeafNode() { + Assert.assertEquals("Map size should be equal to root node count", + ROOT_COUNT, mapper.getTreeSize()); + expand(testData.get(0)); + expand(testData.get(1)); + Assert.assertEquals("Root and parent node expanded", + ROOT_COUNT + PARENT_COUNT + LEAF_COUNT, mapper.getTreeSize()); + checkMapSize(); + expand(testData.get(2)); + Assert.assertEquals("Expanding a leaf node should have no effect", + ROOT_COUNT + PARENT_COUNT + LEAF_COUNT, mapper.getTreeSize()); + checkMapSize(); + } + + @Test + public void findParentIndexOfLeaf() { + expand(testData.get(0)); + Assert.assertEquals("Could not find the root node of a parent", + Integer.valueOf(0), mapper.getParentIndex(testData.get(1))); + + expand(testData.get(1)); + Assert.assertEquals("Could not find the parent of a leaf", + Integer.valueOf(1), mapper.getParentIndex(testData.get(2))); + } + + @Test + public void fetchRangeOfRows() { + expand(testData.get(0)); + expand(testData.get(1)); + + List expectedResult = testData.stream() + .filter(n -> roots.contains(n) + || n.getParent().equals(testData.get(0)) + || n.getParent().equals(testData.get(1))) + .collect(Collectors.toList()); + + // Range containing deepest level of expanded nodes without their + // parents in addition to root nodes at the end. + Range range = Range.between(3, mapper.getTreeSize()); + verifyFetchIsCorrect(expectedResult, range); + + // Only the expanded two nodes, nothing more. + range = Range.between(0, 2); + verifyFetchIsCorrect(expectedResult, range); + + // Fetch everything + range = Range.between(0, mapper.getTreeSize()); + verifyFetchIsCorrect(expectedResult, range); + } + + @Test + public void fetchRangeOfRowsWithSorting() { + // Expand before sort + expand(testData.get(0)); + expand(testData.get(1)); + + // Construct a sorted version of test data with correct filters + List> levels = new ArrayList<>(); + Comparator comparator = Comparator.comparing(Node::getNumber) + .reversed(); + levels.add(testData.stream().filter(n -> n.getParent() == null) + .sorted(comparator).collect(Collectors.toList())); + levels.add( + testData.stream().filter(n -> n.getParent() == testData.get(0)) + .sorted(comparator).collect(Collectors.toList())); + levels.add( + testData.stream().filter(n -> n.getParent() == testData.get(1)) + .sorted(comparator).collect(Collectors.toList())); + + List expectedResult = levels.get(0).stream().flatMap(root -> { + Stream nextLevel = levels.get(1).stream() + .filter(n -> n.getParent() == root) + .flatMap(node -> Stream.concat(Stream.of(node), + levels.get(2).stream() + .filter(n -> n.getParent() == node))); + return Stream.concat(Stream.of(root), nextLevel); + }).collect(Collectors.toList()); + + // Apply sorting + mapper.setInMemorySorting(comparator::compare); + + // Range containing deepest level of expanded nodes without their + // parents in addition to root nodes at the end. 
+        Range range = Range.between(8, mapper.getTreeSize());
+        verifyFetchIsCorrect(expectedResult, range);
+
+        // Only the root nodes, nothing more.
+        range = Range.between(0, ROOT_COUNT);
+        verifyFetchIsCorrect(expectedResult, range);
+
+        // Fetch everything
+        range = Range.between(0, mapper.getTreeSize());
+        verifyFetchIsCorrect(expectedResult, range);
+    }
+
+    @Test
+    public void fetchWithFilter() {
+        expand(testData.get(0));
+        Node expandedNode = testData.get(2 + LEAF_COUNT); // Expand second node
+        expand(expandedNode);
+
+        SerializablePredicate<Node> filter = n -> n.getNumber() % 2 == 0;
+        List<Node> expectedResult = testData.stream().filter(filter)
+                .filter(n -> roots.contains(n)
+                        || n.getParent().equals(testData.get(0))
+                        || n.getParent().equals(expandedNode))
+                .collect(Collectors.toList());
+
+        mapper.setFilter(filter);
+
+        // Fetch everything
+        Range range = Range.between(0, mapper.getTreeSize());
+        verifyFetchIsCorrect(expectedResult, range);
+    }
+
+    private void expand(Node node) {
+        insertRows(mapper.doExpand(node, mapper.getIndexOf(node)));
+    }
+
+    private void collapse(Node node) {
+        removeRows(mapper.doCollapse(node, mapper.getIndexOf(node)));
+    }
+
+    private void verifyFetchIsCorrect(List<Node> expectedResult, Range range) {
+        List<Node> collect = mapper.fetchItems(range)
+                .collect(Collectors.toList());
+        for (int i = 0; i < range.length(); ++i) {
+            Assert.assertEquals("Unexpected fetch results.",
+                    expectedResult.get(i + range.getStart()), collect.get(i));
+        }
+    }
+
+    private static List<Node> generateTestData() {
+        List<Node> nodes = new ArrayList<>();
+        for (int i = 0; i < ROOT_COUNT; ++i) {
+            Node root = new Node();
+            nodes.add(root);
+            for (int j = 0; j < PARENT_COUNT; ++j) {
+                Node parent = new Node(root);
+                nodes.add(parent);
+                for (int k = 0; k < LEAF_COUNT; ++k) {
+                    nodes.add(new Node(parent));
+                }
+            }
+        }
+        return nodes;
+    }
+
+    private void checkMapSize() {
+        Assert.assertEquals("Map size not properly updated",
+                mapper.getTreeSize(), mapSize);
+    }
+
+    public void removeRows(Range range) {
+        Assert.assertTrue("Index not in range",
+                0 <= range.getStart() && range.getStart() < mapSize);
+        Assert.assertTrue("Removing more items than in map",
+                range.getEnd() <= mapSize);
+        mapSize -= range.length();
+    }
+
+    public void insertRows(Range range) {
+        Assert.assertTrue("Index not in range",
+                0 <= range.getStart() && range.getStart() <= mapSize);
+        mapSize += range.length();
+    }
+}
diff --git a/server/src/test/java/com/vaadin/data/provider/hierarchical/Node.java b/server/src/test/java/com/vaadin/data/provider/hierarchical/Node.java
new file mode 100644
index 0000000000..c15f713f53
--- /dev/null
+++ b/server/src/test/java/com/vaadin/data/provider/hierarchical/Node.java
@@ -0,0 +1,33 @@
+package com.vaadin.data.provider.hierarchical;
+
+import java.io.Serializable;
+
+public class Node implements Serializable {
+
+    private static int counter = 0;
+
+    private final Node parent;
+    private final int number;
+
+    public Node() {
+        this(null);
+    }
+
+    public Node(Node parent) {
+        this.parent = parent;
+        this.number = counter++;
+    }
+
+    public Node getParent() {
+        return parent;
+    }
+
+    public int getNumber() {
+        return number;
+    }
+
+    public String toString() {
+        return number + (parent != null ? " [parent: " + parent.toString() + "]"
+                : "");
+    }
+}
diff --git a/shared/src/main/java/com/vaadin/shared/ui/treegrid/FocusParentRpc.java b/shared/src/main/java/com/vaadin/shared/ui/treegrid/FocusParentRpc.java
index 856b3369db..333fc84d35 100644
--- a/shared/src/main/java/com/vaadin/shared/ui/treegrid/FocusParentRpc.java
+++ b/shared/src/main/java/com/vaadin/shared/ui/treegrid/FocusParentRpc.java
@@ -28,10 +28,10 @@ public interface FocusParentRpc extends ServerRpc {
     /**
      * Focuses cell in the row parent
      *
-     * @param rowIndex
-     *            the row index
+     * @param rowKey
+     *            the row key
      * @param cellIndex
      *            the cell index
      */
-    void focusParent(int rowIndex, int cellIndex);
+    void focusParent(String rowKey, int cellIndex);
 }
diff --git a/uitest/src/main/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchy.java b/uitest/src/main/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchy.java
index d630e36292..53d1cb98e7 100644
--- a/uitest/src/main/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchy.java
+++ b/uitest/src/main/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchy.java
@@ -14,8 +14,7 @@ import com.vaadin.ui.TreeGrid;
 
 public class TreeGridChangingHierarchy extends AbstractTestUI {
 
-    private static class TestDataProvider
-            extends TreeDataProvider<String> {
+    private static class TestDataProvider extends TreeDataProvider<String> {
 
         private TreeData<String> treeData;
 
@@ -64,24 +63,34 @@ public class TreeGridChangingHierarchy extends AbstractTestUI {
         Button btn3 = new Button("remove a/a");
         btn3.addClickListener(event -> {
             data.removeItem("a/a");
+            // Inform item removal to DataProvider
+            grid.getDataProvider().refreshAll();
         });
         Button btn4 = new Button("remove children of a/a");
         btn4.addClickListener(event -> {
             data.removeItem("a/a/a");
             data.removeItem("a/a/c");
+            // Inform item removal to DataProvider
+            grid.getDataProvider().refreshAll();
         });
         Button btn5 = new Button("remove a");
         btn5.addClickListener(event -> {
             data.removeItem("a");
+            // Inform item removal to DataProvider
+            grid.getDataProvider().refreshAll();
         });
         Button btn6 = new Button("remove children of a");
         btn6.addClickListener(event -> {
             data.removeItem("a/a");
             data.removeItem("a/b");
+            // Inform item removal to DataProvider
+            grid.getDataProvider().refreshAll();
         });
         Button btn7 = new Button("remove children of a/a/a");
        btn7.addClickListener(event -> {
             data.removeItem("a/a/a/a");
+            // Inform item removal to DataProvider
+            grid.getDataProvider().refreshAll();
         });
 
         addComponents(grid, btn, btn2, btn3, btn4, btn5, btn6, btn7);
diff --git a/uitest/src/test/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchyTest.java b/uitest/src/test/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchyTest.java
index 89273af661..eaac894523 100644
--- a/uitest/src/test/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchyTest.java
+++ b/uitest/src/test/java/com/vaadin/tests/components/treegrid/TreeGridChangingHierarchyTest.java
@@ -52,9 +52,6 @@ public class TreeGridChangingHierarchyTest extends SingleBrowserTest {
         grid.expandWithClick(1);
         grid.collapseWithClick(0);
         removeAABtn.click();
-        // Item removed from hierarchy. when encountering less children than
-        // expected, should reset:
-        grid.expandWithClick(0);
         // expand "a" after the reset:
         grid.expandWithClick(0);
         // "a/a" should be removed from a's children:
@@ -92,9 +89,6 @@ public class TreeGridChangingHierarchyTest extends SingleBrowserTest {
         grid.expandWithClick(2);
         removeChildrenOfAAABtn.click();
         grid.collapseWithClick(1);
-        // reset should get triggered here
-        grid.expandWithClick(1);
-        grid.expandWithClick(0);
         grid.expandWithClick(1);
         assertEquals("a/a/a", grid.getCell(2, 0).getText());
         assertFalse(grid.hasExpandToggle(2, 0));
@@ -106,7 +100,9 @@ public class TreeGridChangingHierarchyTest extends SingleBrowserTest {
         grid.expandWithClick(0);
         grid.getCell(1, 0).click();
         removeChildrenOfABtn.click();
-        grid.collapseWithClick(0);
+        // HierarchyMapper will notice the removal of the children of a, and
+        // mark it as collapsed.
+        // grid.collapseWithClick(0);
         grid.getCell(1, 0).click();
         assertTrue(grid.getRow(1).isSelected());
     }
-- 
2.39.5
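For readers tracing the new server-side tests, the core contract exercised by HierarchyMapperWithDataTest above can be condensed into a short sketch. It is illustrative only, not part of the patch, and reuses the mapper, roots, ROOT_COUNT and PARENT_COUNT fixtures defined in that test: every expand and collapse is reported back as a Range of affected row indexes, which is exactly what the insertRows/removeRows bookkeeping in the test verifies.

    // Sketch of the Range-based expand/collapse contract (assumes the
    // HierarchyMapperWithDataTest fixture: 5 roots with 4 children each).
    Node firstRoot = roots.get(0);

    Range added = mapper.doExpand(firstRoot, mapper.getIndexOf(firstRoot));
    // The direct children of firstRoot became visible, one row per child.
    Assert.assertEquals(PARENT_COUNT, added.length());
    Assert.assertEquals(ROOT_COUNT + PARENT_COUNT, mapper.getTreeSize());

    Range removed = mapper.doCollapse(firstRoot, mapper.getIndexOf(firstRoot));
    // Collapsing hides exactly the rows that the expand made visible.
    Assert.assertEquals(PARENT_COUNT, removed.length());
    Assert.assertEquals(ROOT_COUNT, mapper.getTreeSize());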