source.dussan.org Git - sonarqube.git/commitdiff
Fix some quality flaws
author Evgeny Mandrikov <mandrikov@gmail.com>
Wed, 5 Dec 2012 21:30:40 +0000 (22:30 +0100)
committer Evgeny Mandrikov <mandrikov@gmail.com>
Wed, 5 Dec 2012 21:30:40 +0000 (22:30 +0100)
sonar-duplications/src/main/java/org/sonar/duplications/block/ByteArray.java
sonar-duplications/src/main/java/org/sonar/duplications/detector/ContainsInComparator.java
sonar-duplications/src/main/java/org/sonar/duplications/detector/original/BlocksGroup.java
sonar-duplications/src/main/java/org/sonar/duplications/detector/original/OriginalCloneDetectionAlgorithm.java
sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/DuplicationsCollector.java
sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/Edge.java
sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/Search.java
sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/SuffixTree.java
sonar-duplications/src/main/java/org/sonar/duplications/detector/suffixtree/SuffixTreeCloneDetectionAlgorithm.java
sonar-duplications/src/main/java/org/sonar/duplications/java/JavaTokenProducer.java
sonar-duplications/src/main/java/org/sonar/duplications/token/TokenChannel.java

index dc7d78f4665a38abebf3efd91ebd1c255d95eca8..0fe0ad05618c5aee3c58d0196f9c674e2507739a 100644 (file)
@@ -80,7 +80,8 @@ public final class ByteArray {
   }
 
   public int[] toIntArray() {
-    int size = (bytes.length / 4) + (bytes.length % 4 == 0 ? 0 : 1); // Pad the size to multiple of 4
+    // Pad the size to multiple of 4
+    int size = (bytes.length / 4) + (bytes.length % 4 == 0 ? 0 : 1);
     ByteBuffer bb = ByteBuffer.allocate(size * 4);
     bb.put(bytes);
     bb.rewind();
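For context, the size computation above rounds the byte count up to a whole number of 4-byte ints before the bytes are copied into a ByteBuffer. A minimal standalone sketch of that padding rule (the PaddingSketch class and its toIntArray helper are illustrative, not taken from ByteArray):

import java.nio.ByteBuffer;

public class PaddingSketch {
  // Same padding rule as in the hunk above: round up to a multiple of 4 bytes.
  static int[] toIntArray(byte[] bytes) {
    int size = (bytes.length / 4) + (bytes.length % 4 == 0 ? 0 : 1);
    ByteBuffer bb = ByteBuffer.allocate(size * 4);
    bb.put(bytes);
    bb.rewind();
    int[] result = new int[size];
    bb.asIntBuffer().get(result);
    return result;
  }

  public static void main(String[] args) {
    // 5 bytes are padded with zeros to 8 bytes, i.e. 2 ints.
    System.out.println(toIntArray(new byte[] {1, 2, 3, 4, 5}).length); // prints 2
  }
}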
index 9c2ad3c295c2830e49d4410a50492b9362a37d72..917a195a9221d911dde79641ede4ef528e041a9e 100644 (file)
@@ -76,12 +76,15 @@ public final class ContainsInComparator implements Comparator<ClonePart> {
     if (c == 0) {
       if (part1.getUnitStart() <= part2.getUnitStart()) {
         if (part2.getUnitStart() + l2 <= part1.getUnitStart() + l1) {
-          return 0; // part1 contains part2
+          // part1 contains part2
+          return 0;
         } else {
-          return -1; // SortedListsUtils#contains should continue search
+          // SortedListsUtils#contains should continue search
+          return -1;
         }
       } else {
-        return 1; // unitStart of part1 is less than unitStart of part2 - SortedListsUtils#contains should stop search
+        // unitStart of part1 is less than unitStart of part2 - SortedListsUtils#contains should stop search
+        return 1;
       }
     } else {
       return c;
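For context, the branch above treats part1 as containing part2 when part2's unit range lies entirely inside part1's. A minimal sketch of that condition with plain ints (the ContainmentSketch class and its contains helper are illustrative, not from ContainsInComparator):

public class ContainmentSketch {
  // part1 contains part2 when the range [start2, start2 + len2) lies inside [start1, start1 + len1).
  static boolean contains(int start1, int len1, int start2, int len2) {
    return start1 <= start2 && start2 + len2 <= start1 + len1;
  }

  public static void main(String[] args) {
    System.out.println(contains(10, 20, 12, 5));  // true: [12, 17) is inside [10, 30)
    System.out.println(contains(10, 20, 25, 10)); // false: [25, 35) extends past [10, 30)
  }
}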
index 0c3e259368b9abbd9fce6e2f9ea07aa14418ca95..ba2dc5c1887479d8ca353df7e8e5fb143c963b40 100644 (file)
@@ -113,15 +113,18 @@ final class BlocksGroup {
       if (c == 0) {
         c = block1.getIndexInFile() + 1 - block2.getIndexInFile();
       }
-      if (c == 0) { // list1[i] == list2[j]
+      if (c == 0) {
+        // list1[i] == list2[j]
         i++;
         j++;
         intersection.blocks.add(block2);
       }
-      if (c > 0) { // list1[i] > list2[j]
+      if (c > 0) {
+        // list1[i] > list2[j]
         j++;
       }
-      if (c < 0) { // list1[i] < list2[j]
+      if (c < 0) {
+        // list1[i] < list2[j]
         i++;
       }
     }
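For context, the loop above is a two-pointer walk over two sorted block lists: advance the index on the smaller side, and collect the element when both sides match. A minimal sketch of the same pattern over sorted int arrays (the IntersectionSketch class is illustrative, not from BlocksGroup):

import java.util.ArrayList;
import java.util.List;

public class IntersectionSketch {
  // Two-pointer intersection of two sorted arrays, mirroring the c == 0 / c > 0 / c < 0 cases above.
  static List<Integer> intersect(int[] list1, int[] list2) {
    List<Integer> intersection = new ArrayList<>();
    int i = 0;
    int j = 0;
    while (i < list1.length && j < list2.length) {
      int c = list1[i] - list2[j];
      if (c == 0) {
        // list1[i] == list2[j]
        intersection.add(list2[j]);
        i++;
        j++;
      } else if (c > 0) {
        // list1[i] > list2[j]
        j++;
      } else {
        // list1[i] < list2[j]
        i++;
      }
    }
    return intersection;
  }

  public static void main(String[] args) {
    System.out.println(intersect(new int[] {1, 3, 5, 7}, new int[] {3, 4, 7})); // [3, 7]
  }
}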
index 84f3633f5f65bf3bd18f49cbc7a108a550ff039b..670a5c59495c0b1c56d0e36f5d3d6cb45d87eeaa 100644 (file)
@@ -68,7 +68,8 @@ public final class OriginalCloneDetectionAlgorithm {
     int size = fileBlocks.size();
 
     // Godin: create one group per unique hash
-    Map<ByteArray, BlocksGroup> groupsByHash = Maps.newHashMap(); // TODO Godin: can we create map with expected size?
+    // TODO Godin: can we create map with expected size?
+    Map<ByteArray, BlocksGroup> groupsByHash = Maps.newHashMap();
     for (Block fileBlock : fileBlocks) {
       ByteArray hash = fileBlock.getBlockHash();
       BlocksGroup sameHash = groupsByHash.get(hash);
index b36f5b3c94fa4bc2b875609de903ee324cbf4cc0..12873e45af9a82cc17769fb751bbe6c6bb037b41 100644 (file)
@@ -95,7 +95,8 @@ public class DuplicationsCollector extends Search.Collector {
           lastBlock.getEndLine());
 
       // TODO Godin: maybe use FastStringComparator here ?
-      if (originResourceId.equals(part.getResourceId())) { // part from origin
+      if (originResourceId.equals(part.getResourceId())) {
+        // part from origin
         if (origin == null) {
           origin = part;
           // To calculate length important to use the origin, because otherwise block may come from DB without required data
index a6c756a20638d994ad5d12a5f8cfaae2846e1c10..1c25e1da6ffe03accd94de150c0deeff820567f1 100644 (file)
@@ -21,10 +21,14 @@ package org.sonar.duplications.detector.suffixtree;
 
 public final class Edge {
 
-  private int beginIndex; // can't be changed
+  // can't be changed
+  private int beginIndex; 
+
   private int endIndex;
   private Node startNode;
-  private Node endNode; // can't be changed, could be used as edge id
+
+  // can't be changed, could be used as edge id
+  private Node endNode;
 
   // each time edge is created, a new end node is created
   public Edge(int beginIndex, int endIndex, Node startNode) {
index c2174c37bd995363aa198107b6738318e8060553..2fea748404a27af5f667af6a3224c69cb81e17dc 100644 (file)
@@ -68,11 +68,13 @@ public final class Search {
     while (!stack.isEmpty()) {
       Node node = stack.removeLast();
       node.startSize = list.size();
-      if (node.getEdges().isEmpty()) { // leaf
+      if (node.getEdges().isEmpty()) {
+        // leaf
         list.add(node.depth);
         node.endSize = list.size();
       } else {
-        if (!node.equals(tree.getRootNode())) { // inner node = not leaf and not root
+        if (!node.equals(tree.getRootNode())) {
+          // inner node = not leaf and not root
           innerNodes.add(node);
         }
         for (Edge edge : node.getEdges()) {
index 70c76aae86dc86e2181800f28d2e4c2ae4896618..7bea7a8d32fcc2ee7785e736c2c48f2acaa16a00 100644 (file)
@@ -103,7 +103,8 @@ public final class SuffixTree {
       active.canonize();
     }
     updateSuffixNode(lastParentNode, parentNode);
-    active.incEndIndex(); // Now the endpoint is the next active point
+    active.incEndIndex();
+    // Now the endpoint is the next active point
     active.canonize();
   }
 
index 99036f4c1bdba27fc218ad4a093b48bc8ccd7fa4..c7240c40d5877e0a1282a5a92d901631e71e27bc 100644 (file)
@@ -55,7 +55,8 @@ public final class SuffixTreeCloneDetectionAlgorithm {
     String originResourceId = fileBlocks.iterator().next().getResourceId();
     Map<String, List<Block>> fromIndex = retrieveFromIndex(index, originResourceId, hashes);
 
-    if (fromIndex.isEmpty() && hashes.size() == fileBlocks.size()) { // optimization for the case when there is no duplications
+    if (fromIndex.isEmpty() && hashes.size() == fileBlocks.size()) {
+      // optimization for the case when there is no duplications
       return null;
     }
 
index 588c0c3e3c87d6b0037a9ef2b9c5b400c8e06598..6a7918082f1eac0c32d977ae76b04aa780ed3584 100644 (file)
@@ -59,15 +59,23 @@ public final class JavaTokenProducer {
         // Identifiers, Keywords, Boolean Literals, The Null Literal
         .token("\\p{javaJavaIdentifierStart}++\\p{javaJavaIdentifierPart}*+")
         // Floating-Point Literals
-        .token("[0-9_]++\\.([0-9_]++)?+" + EXP + "?+" + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL) // Decimal
-        .token("\\.[0-9_]++" + EXP + "?+" + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL) // Decimal
-        .token("[0-9_]++" + EXP + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL) // Decimal
-        .token("0[xX][0-9a-fA-F_]++\\.[0-9a-fA-F_]*+" + BINARY_EXP + "?+" + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL) // Hexadecimal
-        .token("0[xX][0-9a-fA-F_]++" + BINARY_EXP + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL) // Hexadecimal
+        // Decimal
+        .token("[0-9_]++\\.([0-9_]++)?+" + EXP + "?+" + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL)
+        // Decimal
+        .token("\\.[0-9_]++" + EXP + "?+" + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL)
+        // Decimal
+        .token("[0-9_]++" + EXP + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL)
+        // Hexadecimal
+        .token("0[xX][0-9a-fA-F_]++\\.[0-9a-fA-F_]*+" + BINARY_EXP + "?+" + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL)
+        // Hexadecimal
+        .token("0[xX][0-9a-fA-F_]++" + BINARY_EXP + FLOAT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL)
         // Integer Literals
-        .token("0[xX][0-9a-fA-F_]++" + INT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL) // Hexadecimal
-        .token("0[bB][01_]++" + INT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL) // Binary (Java 7)
-        .token("[0-9_]++" + INT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL) // Decimal and Octal
+        // Hexadecimal
+        .token("0[xX][0-9a-fA-F_]++" + INT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL)
+        // Binary (Java 7)
+        .token("0[bB][01_]++" + INT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL)
+        // Decimal and Octal
+        .token("[0-9_]++" + INT_SUFFIX + "?+", NORMALIZED_NUMERIC_LITERAL)
         // Any other character
         .token(".")
         .build();
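For context, each .token(...) call above registers a possessive regular expression for one class of Java numeric literal. The EXP and FLOAT_SUFFIX constants are defined elsewhere in JavaTokenProducer; the values below are assumptions for illustration only. A minimal sketch exercising the first decimal floating-point pattern from the hunk:

import java.util.regex.Pattern;

public class LiteralRegexSketch {
  // Assumed stand-ins for the class constants (illustrative, not the actual definitions).
  static final String EXP = "([eE][+-]?+[0-9_]++)";
  static final String FLOAT_SUFFIX = "[fFdD]";

  public static void main(String[] args) {
    // First decimal pattern above: digits, a dot, optional fraction, optional exponent, optional suffix.
    Pattern decimal = Pattern.compile("[0-9_]++\\.([0-9_]++)?+" + EXP + "?+" + FLOAT_SUFFIX + "?+");
    System.out.println(decimal.matcher("3.14e10f").matches()); // true
    System.out.println(decimal.matcher(".5").matches());       // false, handled by the second pattern
  }
}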
index 99515f8692156bb710dd4128f79e050aac3da773..9918dcb96e6a1a38ae746c16bce2ee1f96192917 100644 (file)
@@ -44,13 +44,15 @@ class TokenChannel extends Channel<TokenQueue> {
   @Override
   public boolean consume(CodeReader code, TokenQueue output) {
     if (code.popTo(matcher, tmpBuilder) > 0) {
-      Cursor previousCursor = code.getPreviousCursor(); // see SONAR-2499
+      // see SONAR-2499
+      Cursor previousCursor = code.getPreviousCursor();
       if (normalizationValue != null) {
         output.add(new Token(normalizationValue, previousCursor.getLine(), previousCursor.getColumn()));
       } else {
         output.add(new Token(tmpBuilder.toString(), previousCursor.getLine(), previousCursor.getColumn()));
       }
-      tmpBuilder.setLength(0); // Godin: note that other channels use method delete in order to do the same thing
+      // Godin: note that other channels use method delete in order to do the same thing
+      tmpBuilder.setLength(0);
       return true;
     }
     return false;