about summary refs log tree commit diff stats
path: root/sonar-duplications/src/main/java/org/sonar
diff options
context:
space:
mode:
author    Simon Brandhof <simon.brandhof@sonarsource.com>  2015-05-25 21:38:15 +0200
committer Simon Brandhof <simon.brandhof@sonarsource.com>  2015-05-28 09:28:50 +0200
commit    ab50f4f37d637e09de2a47872dfe9ea07ea07e64 (patch)
tree      18f63503930e86574bb0e0d4defb39234475b426 /sonar-duplications/src/main/java/org/sonar
parent    1bdb2686d791326ea0694127047323ad3d8695be (diff)
download  sonarqube-ab50f4f37d637e09de2a47872dfe9ea07ea07e64.tar.gz
          sonarqube-ab50f4f37d637e09de2a47872dfe9ea07ea07e64.zip
SONAR-6370 Decrease coupling of sonar-duplications with guava
Diffstat (limited to 'sonar-duplications/src/main/java/org/sonar')
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/CodeFragment.java                                          2
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/block/Block.java                                           4
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/block/BlockChunker.java                                    6
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/detector/original/BlocksGroup.java                        10
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/detector/original/Filter.java                              8
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/detector/original/OriginalCloneDetectionAlgorithm.java    23
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/DuplicationsCollector.java            20
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/Search.java                           16
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/SuffixTreeCloneDetectionAlgorithm.java 25
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/TextSet.java                           4
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/index/CloneGroup.java                                      3
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java                         35
-rw-r--r--  sonar-duplications/src/main/java/org/sonar/duplications/internal/pmd/PmdBlockChunker.java                         11
13 files changed, 79 insertions, 88 deletions
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/CodeFragment.java b/sonar-duplications/src/main/java/org/sonar/duplications/CodeFragment.java
index 42e30bfec13..375061dff8a 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/CodeFragment.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/CodeFragment.java
@@ -19,7 +19,6 @@
*/
package org.sonar.duplications;
-import com.google.common.annotations.Beta;
/**
* TODO Enforce contracts of this interface in concrete classes by using preconditions, currently this leads to failures of tests.
@@ -28,7 +27,6 @@ import com.google.common.annotations.Beta;
*
* @since 2.14
*/
-@Beta
public interface CodeFragment {
/**
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/block/Block.java b/sonar-duplications/src/main/java/org/sonar/duplications/block/Block.java
index 349ada7dd16..32167066a40 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/block/Block.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/block/Block.java
@@ -19,7 +19,6 @@
*/
package org.sonar.duplications.block;
-import com.google.common.annotations.Beta;
import org.sonar.duplications.CodeFragment;
/**
@@ -89,7 +88,6 @@ public final class Block implements CodeFragment {
return this;
}
- @Beta
public Builder setUnit(int start, int end) {
this.startUnit = start;
this.endUnit = end;
@@ -142,7 +140,6 @@ public final class Block implements CodeFragment {
/**
* @since 2.14
*/
- @Beta
public int getStartUnit() {
return startUnit;
}
@@ -150,7 +147,6 @@ public final class Block implements CodeFragment {
/**
* @since 2.14
*/
- @Beta
public int getEndUnit() {
return endUnit;
}
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/block/BlockChunker.java b/sonar-duplications/src/main/java/org/sonar/duplications/block/BlockChunker.java
index d7adf472f2f..31034f70f63 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/block/BlockChunker.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/block/BlockChunker.java
@@ -19,7 +19,7 @@
*/
package org.sonar.duplications.block;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
import org.sonar.duplications.statement.Statement;
import java.util.Collections;
@@ -55,7 +55,7 @@ public class BlockChunker {
}
public List<Block> chunk(String resourceId, List<Statement> statements) {
- List<Statement> filtered = Lists.newArrayList();
+ List<Statement> filtered = new ArrayList<>();
int i = 0;
while (i < statements.size()) {
Statement first = statements.get(i);
@@ -75,7 +75,7 @@ public class BlockChunker {
return Collections.emptyList();
}
Statement[] statementsArr = statements.toArray(new Statement[statements.size()]);
- List<Block> blocks = Lists.newArrayListWithCapacity(statementsArr.length - blockSize + 1);
+ List<Block> blocks = new ArrayList<>(statementsArr.length - blockSize + 1);
long hash = 0;
int first = 0;
int last = 0;
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/BlocksGroup.java b/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/BlocksGroup.java
index 063d98891ac..67689f1e70c 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/BlocksGroup.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/BlocksGroup.java
@@ -19,14 +19,12 @@
*/
package org.sonar.duplications.detector.original;
+import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
-
import org.sonar.duplications.block.Block;
import org.sonar.duplications.utils.FastStringComparator;
-import com.google.common.collect.Lists;
-
/**
* Set of {@link Block}s, which internally stored as a sorted list.
*/
@@ -44,7 +42,7 @@ final class BlocksGroup {
protected final List<Block> blocks;
private BlocksGroup() {
- this.blocks = Lists.newArrayList();
+ this.blocks = new ArrayList<>();
}
public int size() {
@@ -167,7 +165,7 @@ final class BlocksGroup {
}
private static List<Block[]> pairs(BlocksGroup beginGroup, BlocksGroup endGroup, int len) {
- List<Block[]> result = Lists.newArrayList();
+ List<Block[]> result = new ArrayList<>();
List<Block> beginBlocks = beginGroup.blocks;
List<Block> endBlocks = endGroup.blocks;
int i = 0;
@@ -180,7 +178,7 @@ final class BlocksGroup {
c = beginBlock.getIndexInFile() + len - 1 - endBlock.getIndexInFile();
}
if (c == 0) {
- result.add(new Block[] { beginBlock, endBlock });
+ result.add(new Block[] {beginBlock, endBlock});
i++;
j++;
}
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/Filter.java b/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/Filter.java
index 1ea553dfd41..4289f084b63 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/Filter.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/Filter.java
@@ -20,15 +20,13 @@
package org.sonar.duplications.detector.original;
import java.util.Iterator;
+import java.util.LinkedList;
import java.util.List;
-
import org.sonar.duplications.detector.ContainsInComparator;
import org.sonar.duplications.index.CloneGroup;
import org.sonar.duplications.index.ClonePart;
import org.sonar.duplications.utils.SortedListsUtils;
-import com.google.common.collect.Lists;
-
/**
* Performs incremental and brute force algorithm in order to filter clones, which are fully covered by other clones.
* All clones for filtering must be of the same origin - there is no sanity check on this.
@@ -48,7 +46,7 @@ final class Filter {
*
* @see #add(CloneGroup)
*/
- private final List<CloneGroup> filtered = Lists.newLinkedList();
+ private final List<CloneGroup> filtered = new LinkedList<>();
/**
* @return current results of filtering
@@ -115,7 +113,7 @@ final class Filter {
List<ClonePart> firstParts = first.getCloneParts();
List<ClonePart> secondParts = second.getCloneParts();
return SortedListsUtils.contains(secondParts, firstParts, new ContainsInComparator(second.getCloneUnitLength(), first.getCloneUnitLength()))
- && SortedListsUtils.contains(firstParts, secondParts, ContainsInComparator.RESOURCE_ID_COMPARATOR);
+ && SortedListsUtils.contains(firstParts, secondParts, ContainsInComparator.RESOURCE_ID_COMPARATOR);
}
}
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/OriginalCloneDetectionAlgorithm.java b/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/OriginalCloneDetectionAlgorithm.java
index 0c721b48a3a..bfd95e633b6 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/OriginalCloneDetectionAlgorithm.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/detector/original/OriginalCloneDetectionAlgorithm.java
@@ -19,19 +19,18 @@
*/
package org.sonar.duplications.detector.original;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;
import org.sonar.duplications.index.CloneGroup;
import org.sonar.duplications.index.CloneIndex;
import org.sonar.duplications.index.ClonePart;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
/**
* Implementation of algorithm described in paper
* <a href="http://www4.in.tum.de/~juergens/publications/icsm2010_crc.pdf">Index-Based Code Clone Detection: Incremental, Distributed, Scalable</a>
@@ -69,7 +68,7 @@ public final class OriginalCloneDetectionAlgorithm {
// Godin: create one group per unique hash
// TODO Godin: can we create map with expected size?
- Map<ByteArray, BlocksGroup> groupsByHash = Maps.newHashMap();
+ Map<ByteArray, BlocksGroup> groupsByHash = new HashMap<>();
for (Block fileBlock : fileBlocks) {
ByteArray hash = fileBlock.getBlockHash();
BlocksGroup sameHash = groupsByHash.get(hash);
@@ -202,16 +201,16 @@ public final class OriginalCloneDetectionAlgorithm {
List<Block[]> pairs = beginGroup.pairs(endGroup, cloneLength);
ClonePart origin = null;
- List<ClonePart> parts = Lists.newArrayList();
+ List<ClonePart> parts = new ArrayList<>();
for (int i = 0; i < pairs.size(); i++) {
Block[] pair = pairs.get(i);
Block firstBlock = pair[0];
Block lastBlock = pair[1];
ClonePart part = new ClonePart(firstBlock.getResourceId(),
- firstBlock.getIndexInFile(),
- firstBlock.getStartLine(),
- lastBlock.getEndLine());
+ firstBlock.getIndexInFile(),
+ firstBlock.getStartLine(),
+ lastBlock.getEndLine());
if (originResourceId.equals(part.getResourceId())) {
if (origin == null) {
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/DuplicationsCollector.java b/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/DuplicationsCollector.java
index a1b4d795fc4..70f1dce99b9 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/DuplicationsCollector.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/DuplicationsCollector.java
@@ -19,16 +19,15 @@
*/
package org.sonar.duplications.detector.suffixtree;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.detector.ContainsInComparator;
import org.sonar.duplications.index.CloneGroup;
import org.sonar.duplications.index.ClonePart;
import org.sonar.duplications.utils.SortedListsUtils;
-import java.util.Collections;
-import java.util.List;
-
/**
* Implementation of {@link Search.Collector}, which constructs {@link CloneGroup}s.
*/
@@ -37,7 +36,7 @@ public class DuplicationsCollector extends Search.Collector {
private final TextSet text;
private final String originResourceId;
- private final List<CloneGroup> filtered = Lists.newArrayList();
+ private final List<CloneGroup> filtered = new ArrayList<>();
private int length;
private int count;
@@ -66,7 +65,6 @@ public class DuplicationsCollector extends Search.Collector {
*
* @param start number of first block from text for this part
* @param end number of last block from text for this part
- * @param len number of blocks in this part
*/
@Override
public void part(int start, int end) {
@@ -84,15 +82,15 @@ public class DuplicationsCollector extends Search.Collector {
CloneGroup.Builder builder = CloneGroup.builder().setLength(length);
- List<ClonePart> parts = Lists.newArrayListWithCapacity(count);
+ List<ClonePart> parts = new ArrayList<>(count);
for (int[] b : blockNumbers) {
Block firstBlock = text.getBlock(b[0]);
Block lastBlock = text.getBlock(b[1]);
ClonePart part = new ClonePart(
- firstBlock.getResourceId(),
- firstBlock.getIndexInFile(),
- firstBlock.getStartLine(),
- lastBlock.getEndLine());
+ firstBlock.getResourceId(),
+ firstBlock.getIndexInFile(),
+ firstBlock.getStartLine(),
+ lastBlock.getEndLine());
// TODO Godin: maybe use FastStringComparator here ?
if (originResourceId.equals(part.getResourceId())) {
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/Search.java b/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/Search.java
index f010b4dbf01..5afde31f2d8 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/Search.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/Search.java
@@ -19,9 +19,13 @@
*/
package org.sonar.duplications.detector.suffixtree;
-import com.google.common.collect.Lists;
-
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.ListIterator;
public final class Search {
@@ -29,8 +33,8 @@ public final class Search {
private final TextSet text;
private final Collector reporter;
- private final List<Integer> list = Lists.newArrayList();
- private final List<Node> innerNodes = Lists.newArrayList();
+ private final List<Integer> list = new ArrayList<>();
+ private final List<Node> innerNodes = new ArrayList<>();
public static void perform(TextSet text, Collector reporter) {
new Search(SuffixTree.create(text), text, reporter).compute();
@@ -64,7 +68,7 @@ public final class Search {
* Depth-first search (DFS).
*/
private void dfs() {
- Deque<Node> stack = Lists.newLinkedList();
+ Deque<Node> stack = new LinkedList<>();
stack.add(tree.getRootNode());
while (!stack.isEmpty()) {
Node node = stack.removeLast();
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/SuffixTreeCloneDetectionAlgorithm.java b/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/SuffixTreeCloneDetectionAlgorithm.java
index e95bb043b1a..aba733f1d3d 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/SuffixTreeCloneDetectionAlgorithm.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/SuffixTreeCloneDetectionAlgorithm.java
@@ -19,24 +19,29 @@
*/
package org.sonar.duplications.detector.suffixtree;
-import java.util.*;
-
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;
import org.sonar.duplications.index.CloneGroup;
import org.sonar.duplications.index.CloneIndex;
-import com.google.common.collect.*;
-
public final class SuffixTreeCloneDetectionAlgorithm {
public static List<CloneGroup> detect(CloneIndex cloneIndex, Collection<Block> fileBlocks) {
if (fileBlocks.isEmpty()) {
- return Collections.EMPTY_LIST;
+ return Collections.emptyList();
}
TextSet text = createTextSet(cloneIndex, fileBlocks);
if (text == null) {
- return Collections.EMPTY_LIST;
+ return Collections.emptyList();
}
DuplicationsCollector reporter = new DuplicationsCollector(text);
Search.perform(text, reporter);
@@ -47,7 +52,7 @@ public final class SuffixTreeCloneDetectionAlgorithm {
}
private static TextSet createTextSet(CloneIndex index, Collection<Block> fileBlocks) {
- Set<ByteArray> hashes = Sets.newHashSet();
+ Set<ByteArray> hashes = new HashSet<>();
for (Block fileBlock : fileBlocks) {
hashes.add(fileBlock.getBlockHash());
}
@@ -66,7 +71,7 @@ public final class SuffixTreeCloneDetectionAlgorithm {
private static TextSet createTextSet(Collection<Block> fileBlocks, Map<String, List<Block>> fromIndex) {
TextSet.Builder textSetBuilder = TextSet.builder();
// TODO Godin: maybe we can reduce size of tree and so memory consumption by removing non-repeatable blocks
- List<Block> sortedFileBlocks = Lists.newArrayList(fileBlocks);
+ List<Block> sortedFileBlocks = new ArrayList<>(fileBlocks);
Collections.sort(sortedFileBlocks, BLOCK_COMPARATOR);
textSetBuilder.add(sortedFileBlocks);
@@ -88,7 +93,7 @@ public final class SuffixTreeCloneDetectionAlgorithm {
}
private static Map<String, List<Block>> retrieveFromIndex(CloneIndex index, String originResourceId, Set<ByteArray> hashes) {
- Map<String, List<Block>> collection = Maps.newHashMap();
+ Map<String, List<Block>> collection = new HashMap<>();
for (ByteArray hash : hashes) {
Collection<Block> blocks = index.getBySequenceHash(hash);
for (Block blockFromIndex : blocks) {
@@ -97,7 +102,7 @@ public final class SuffixTreeCloneDetectionAlgorithm {
if (!originResourceId.equals(resourceId)) {
List<Block> list = collection.get(resourceId);
if (list == null) {
- list = Lists.newArrayList();
+ list = new ArrayList<>();
collection.put(resourceId, list);
}
list.add(blockFromIndex);
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/TextSet.java b/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/TextSet.java
index 82ab1db55a6..4958bc0502e 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/TextSet.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/TextSet.java
@@ -19,11 +19,11 @@
*/
package org.sonar.duplications.detector.suffixtree;
+import java.util.ArrayList;
import java.util.List;
import org.sonar.duplications.block.Block;
-import com.google.common.collect.Lists;
/**
* Simplifies construction of <a href="http://en.wikipedia.org/wiki/Generalised_suffix_tree">generalised suffix-tree</a>.
@@ -32,7 +32,7 @@ public final class TextSet extends AbstractText {
public static final class Builder {
- private List<Object> symbols = Lists.newArrayList();
+ private List<Object> symbols = new ArrayList();
private Integer lengthOfOrigin;
private int count;
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneGroup.java b/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneGroup.java
index 05eab3dd108..0fec9682046 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneGroup.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/index/CloneGroup.java
@@ -19,10 +19,8 @@
*/
package org.sonar.duplications.index;
-import com.google.common.annotations.Beta;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
-
import java.util.ArrayList;
import java.util.List;
@@ -104,7 +102,6 @@ public class CloneGroup {
*
* @since 2.14
*/
- @Beta
public int getLengthInUnits() {
return length;
}
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java b/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java
index 21a0f890ab0..f0257cef0e2 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/index/PackedMemoryCloneIndex.java
@@ -19,14 +19,13 @@
*/
package org.sonar.duplications.index;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;
import org.sonar.duplications.utils.FastStringComparator;
-import java.util.Collection;
-import java.util.List;
-
/**
* Provides an index optimized by memory.
* <p>
@@ -102,7 +101,7 @@ public class PackedMemoryCloneIndex extends AbstractCloneIndex {
int index = DataUtils.binarySearch(byResourceId);
- List<Block> result = Lists.newArrayList();
+ List<Block> result = new ArrayList<>();
int realIndex = resourceIdsIndex[index];
while (index < size && FastStringComparator.INSTANCE.compare(resourceIds[realIndex], resourceId) == 0) {
// extract block (note that there is no need to extract resourceId)
@@ -118,12 +117,12 @@ public class PackedMemoryCloneIndex extends AbstractCloneIndex {
int endUnit = blockData[offset];
Block block = blockBuilder
- .setResourceId(resourceId)
- .setBlockHash(new ByteArray(hash))
- .setIndexInFile(indexInFile)
- .setLines(firstLineNumber, lastLineNumber)
- .setUnit(startUnit, endUnit)
- .build();
+ .setResourceId(resourceId)
+ .setBlockHash(new ByteArray(hash))
+ .setIndexInFile(indexInFile)
+ .setLines(firstLineNumber, lastLineNumber)
+ .setUnit(startUnit, endUnit)
+ .build();
result.add(block);
index++;
@@ -151,7 +150,7 @@ public class PackedMemoryCloneIndex extends AbstractCloneIndex {
int index = DataUtils.binarySearch(byBlockHash);
- List<Block> result = Lists.newArrayList();
+ List<Block> result = new ArrayList<>();
while (index < size && !isLessByHash(size, index)) {
// extract block (note that there is no need to extract hash)
String resourceId = resourceIds[index];
@@ -163,12 +162,12 @@ public class PackedMemoryCloneIndex extends AbstractCloneIndex {
int endUnit = blockData[offset];
Block block = blockBuilder
- .setResourceId(resourceId)
- .setBlockHash(sequenceHash)
- .setIndexInFile(indexInFile)
- .setLines(firstLineNumber, lastLineNumber)
- .setUnit(startUnit, endUnit)
- .build();
+ .setResourceId(resourceId)
+ .setBlockHash(sequenceHash)
+ .setIndexInFile(indexInFile)
+ .setLines(firstLineNumber, lastLineNumber)
+ .setUnit(startUnit, endUnit)
+ .build();
result.add(block);
index++;
}
diff --git a/sonar-duplications/src/main/java/org/sonar/duplications/internal/pmd/PmdBlockChunker.java b/sonar-duplications/src/main/java/org/sonar/duplications/internal/pmd/PmdBlockChunker.java
index 1d329208721..20208ed6b83 100644
--- a/sonar-duplications/src/main/java/org/sonar/duplications/internal/pmd/PmdBlockChunker.java
+++ b/sonar-duplications/src/main/java/org/sonar/duplications/internal/pmd/PmdBlockChunker.java
@@ -19,12 +19,11 @@
*/
package org.sonar.duplications.internal.pmd;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.List;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;
-import java.util.List;
-
/**
* Differences with {@link org.sonar.duplications.block.BlockChunker}:
* works with {@link TokensLine},
@@ -51,7 +50,7 @@ public class PmdBlockChunker {
* @return ArrayList as we need a serializable object
*/
public List<Block> chunk(String resourceId, List<TokensLine> fragments) {
- List<TokensLine> filtered = Lists.newArrayList();
+ List<TokensLine> filtered = new ArrayList<>();
int i = 0;
while (i < fragments.size()) {
TokensLine first = fragments.get(i);
@@ -68,10 +67,10 @@ public class PmdBlockChunker {
fragments = filtered;
if (fragments.size() < blockSize) {
- return Lists.newArrayList();
+ return new ArrayList<>();
}
TokensLine[] fragmentsArr = fragments.toArray(new TokensLine[fragments.size()]);
- List<Block> blocks = Lists.newArrayListWithCapacity(fragmentsArr.length - blockSize + 1);
+ List<Block> blocks = new ArrayList<>(fragmentsArr.length - blockSize + 1);
long hash = 0;
int first = 0;
int last = 0;