// NOTE(review): removed non-source residue from the git web viewer
// (repository banner and blob hash d39ce43e130410b5afc8f6396096f86ba96905d4).
/*
 * SonarQube
 * Copyright (C) 2009-2016 SonarSource SA
 * mailto:contact AT sonarsource DOT com
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 */
package org.sonar.server.computation.task.projectanalysis.duplication;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.junit.Rule;
import org.junit.Test;
import org.sonar.api.config.Settings;
import org.sonar.api.utils.log.LogTester;
import org.sonar.api.utils.log.LoggerLevel;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;
import org.sonar.server.computation.task.projectanalysis.component.Component;
import org.sonar.server.computation.task.projectanalysis.component.FileAttributes;

import static com.google.common.base.Strings.padStart;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.apache.commons.lang.RandomStringUtils.randomAlphanumeric;
import static org.assertj.core.api.Assertions.assertThat;
import static org.sonar.server.computation.task.projectanalysis.component.Component.Type.FILE;
import static org.sonar.server.computation.task.projectanalysis.component.ReportComponent.builder;

44 public class IntegrateCrossProjectDuplicationsTest {
45
46   @Rule
47   public LogTester logTester = new LogTester();
48   @Rule
49   public DuplicationRepositoryRule duplicationRepository = DuplicationRepositoryRule.create();
50
51   static final String XOO_LANGUAGE = "xoo";
52
53   static final String ORIGIN_FILE_KEY = "ORIGIN_FILE_KEY";
54   static final Component ORIGIN_FILE = builder(FILE, 1)
55     .setKey(ORIGIN_FILE_KEY)
56     .setFileAttributes(new FileAttributes(false, XOO_LANGUAGE))
57     .build();
58
59   static final String OTHER_FILE_KEY = "OTHER_FILE_KEY";
60
61   Settings settings = new Settings();
62
63   IntegrateCrossProjectDuplications underTest = new IntegrateCrossProjectDuplications(settings, duplicationRepository);
64
65   @Test
66   public void add_duplications_from_two_blocks() {
67     settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
68
69     Collection<Block> originBlocks = asList(
70       new Block.Builder()
71         .setResourceId(ORIGIN_FILE_KEY)
72         .setBlockHash(new ByteArray("a8998353e96320ec"))
73         .setIndexInFile(0)
74         .setLines(30, 43)
75         .setUnit(0, 5)
76         .build(),
77       new Block.Builder()
78         .setResourceId(ORIGIN_FILE_KEY)
79         .setBlockHash(new ByteArray("2b5747f0e4c59124"))
80         .setIndexInFile(1)
81         .setLines(32, 45)
82         .setUnit(5, 20)
83         .build()
84       );
85
86     Collection<Block> duplicatedBlocks = asList(
87       new Block.Builder()
88         .setResourceId(OTHER_FILE_KEY)
89         .setBlockHash(new ByteArray("a8998353e96320ec"))
90         .setIndexInFile(0)
91         .setLines(40, 53)
92         .build(),
93       new Block.Builder()
94         .setResourceId(OTHER_FILE_KEY)
95         .setBlockHash(new ByteArray("2b5747f0e4c59124"))
96         .setIndexInFile(1)
97         .setLines(42, 55)
98         .build());
99
100     underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
101
102     assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
103       .containsExactly(
104         crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55))
105       );
106   }
107
108   @Test
109   public void add_duplications_from_a_single_block() {
110     settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
111
112     Collection<Block> originBlocks = singletonList(
113       // This block contains 11 tokens -> a duplication will be created
114       new Block.Builder()
115         .setResourceId(ORIGIN_FILE_KEY)
116         .setBlockHash(new ByteArray("a8998353e96320ec"))
117         .setIndexInFile(0)
118         .setLines(30, 45)
119         .setUnit(0, 10)
120         .build()
121       );
122
123     Collection<Block> duplicatedBlocks = singletonList(
124       new Block.Builder()
125         .setResourceId(OTHER_FILE_KEY)
126         .setBlockHash(new ByteArray("a8998353e96320ec"))
127         .setIndexInFile(0)
128         .setLines(40, 55)
129         .build()
130       );
131
132     underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
133
134     assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
135       .containsExactly(
136         crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55))
137       );
138   }
139
140   @Test
141   public void add_no_duplication_from_current_file() {
142     settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
143
144     Collection<Block> originBlocks = asList(
145       new Block.Builder()
146         .setResourceId(ORIGIN_FILE_KEY)
147         .setBlockHash(new ByteArray("a8998353e96320ec"))
148         .setIndexInFile(0)
149         .setLines(30, 45)
150         .setUnit(0, 10)
151         .build(),
152       // Duplication is on the same file
153       new Block.Builder()
154         .setResourceId(ORIGIN_FILE_KEY)
155         .setBlockHash(new ByteArray("a8998353e96320ec"))
156         .setIndexInFile(0)
157         .setLines(46, 60)
158         .setUnit(0, 10)
159         .build()
160       );
161
162     Collection<Block> duplicatedBlocks = singletonList(
163       new Block.Builder()
164         .setResourceId(OTHER_FILE_KEY)
165         .setBlockHash(new ByteArray("a8998353e96320ed"))
166         .setIndexInFile(0)
167         .setLines(40, 55)
168         .build()
169       );
170
171     underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
172
173     assertNoDuplicationAdded(ORIGIN_FILE);
174   }
175
176   @Test
177   public void add_no_duplication_when_not_enough_tokens() {
178     settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
179
180     Collection<Block> originBlocks = singletonList(
181       // This block contains 5 tokens -> not enough to consider it as a duplication
182       new Block.Builder()
183         .setResourceId(ORIGIN_FILE_KEY)
184         .setBlockHash(new ByteArray("a8998353e96320ec"))
185         .setIndexInFile(0)
186         .setLines(30, 45)
187         .setUnit(0, 4)
188         .build()
189       );
190
191     Collection<Block> duplicatedBlocks = singletonList(
192       new Block.Builder()
193         .setResourceId(OTHER_FILE_KEY)
194         .setBlockHash(new ByteArray("a8998353e96320ec"))
195         .setIndexInFile(0)
196         .setLines(40, 55)
197         .build()
198       );
199
200     underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
201
202     assertNoDuplicationAdded(ORIGIN_FILE);
203   }
204
205   @Test
206   public void add_no_duplication_when_no_duplicated_blocks() {
207     settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
208
209     Collection<Block> originBlocks = singletonList(
210       new Block.Builder()
211         .setResourceId(ORIGIN_FILE_KEY)
212         .setBlockHash(new ByteArray("a8998353e96320ec"))
213         .setIndexInFile(0)
214         .setLines(30, 45)
215         .setUnit(0, 10)
216         .build()
217       );
218
219     underTest.computeCpd(ORIGIN_FILE, originBlocks, Collections.<Block>emptyList());
220
221     assertNoDuplicationAdded(ORIGIN_FILE);
222   }
223
224   @Test
225   public void add_duplication_for_java_even_when_no_token() {
226     Component javaFile = builder(FILE, 1)
227       .setKey(ORIGIN_FILE_KEY)
228       .setFileAttributes(new FileAttributes(false, "java"))
229       .build();
230
231     Collection<Block> originBlocks = singletonList(
232       // This block contains 0 token
233       new Block.Builder()
234         .setResourceId(ORIGIN_FILE_KEY)
235         .setBlockHash(new ByteArray("a8998353e96320ec"))
236         .setIndexInFile(0)
237         .setLines(30, 45)
238         .setUnit(0, 0)
239         .build()
240       );
241
242     Collection<Block> duplicatedBlocks = singletonList(
243       new Block.Builder()
244         .setResourceId(OTHER_FILE_KEY)
245         .setBlockHash(new ByteArray("a8998353e96320ec"))
246         .setIndexInFile(0)
247         .setLines(40, 55)
248         .build()
249       );
250
251     underTest.computeCpd(javaFile, originBlocks, duplicatedBlocks);
252
253     assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
254       .containsExactly(
255         crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55))
256       );
257   }
258
259   @Test
260   public void default_minimum_tokens_is_one_hundred() {
261     settings.setProperty("sonar.cpd.xoo.minimumTokens", (Integer) null);
262
263     Collection<Block> originBlocks = singletonList(
264       new Block.Builder()
265         .setResourceId(ORIGIN_FILE_KEY)
266         .setBlockHash(new ByteArray("a8998353e96320ec"))
267         .setIndexInFile(0)
268         .setLines(30, 45)
269         .setUnit(0, 100)
270         .build()
271       );
272
273     Collection<Block> duplicatedBlocks = singletonList(
274       new Block.Builder()
275         .setResourceId(OTHER_FILE_KEY)
276         .setBlockHash(new ByteArray("a8998353e96320ec"))
277         .setIndexInFile(0)
278         .setLines(40, 55)
279         .build()
280       );
281
282     underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
283
284     assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
285       .containsExactly(
286         crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55))
287       );
288   }
289
290   @Test
291   public void do_not_compute_more_than_one_hundred_duplications_when_too_many_duplicated_references() throws Exception {
292     Collection<Block> originBlocks = new ArrayList<>();
293     Collection<Block> duplicatedBlocks = new ArrayList<>();
294
295     Block.Builder blockBuilder = new Block.Builder()
296       .setResourceId(ORIGIN_FILE_KEY)
297       .setBlockHash(new ByteArray("a8998353e96320ec"))
298       .setIndexInFile(0)
299       .setLines(30, 45)
300       .setUnit(0, 100);
301     originBlocks.add(blockBuilder.build());
302
303     // Generate more than 100 duplications of the same block
304     for (int i = 0; i < 110; i++) {
305       duplicatedBlocks.add(
306         blockBuilder
307           .setResourceId(randomAlphanumeric(16))
308           .build()
309         );
310     }
311
312     underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
313
314     assertThat(logTester.logs(LoggerLevel.WARN)).containsOnly(
315       "Too many duplication references on file " + ORIGIN_FILE_KEY + " for block at line 30. Keeping only the first 100 references.");
316     Iterable<Duplication> duplications = duplicationRepository.getDuplications(ORIGIN_FILE);
317     assertThat(duplications).hasSize(1);
318     assertThat(duplications.iterator().next().getDuplicates()).hasSize(100);
319   }
320
321   @Test
322   public void do_not_compute_more_than_one_hundred_duplications_when_too_many_duplications() throws Exception {
323     Collection<Block> originBlocks = new ArrayList<>();
324     Collection<Block> duplicatedBlocks = new ArrayList<>();
325
326     Block.Builder blockBuilder = new Block.Builder()
327       .setIndexInFile(0)
328       .setLines(30, 45)
329       .setUnit(0, 100);
330
331     // Generate more than 100 duplication on different files
332     for (int i = 0; i < 110; i++) {
333       String hash = padStart("hash" + i, 16, 'a');
334       originBlocks.add(
335         blockBuilder
336           .setResourceId(ORIGIN_FILE_KEY)
337           .setBlockHash(new ByteArray(hash))
338           .build());
339       duplicatedBlocks.add(
340         blockBuilder
341           .setResourceId("resource" + i)
342           .setBlockHash(new ByteArray(hash))
343           .build()
344         );
345     }
346
347     underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
348
349     assertThat(duplicationRepository.getDuplications(ORIGIN_FILE)).hasSize(100);
350     assertThat(logTester.logs(LoggerLevel.WARN)).containsOnly("Too many duplication groups on file " + ORIGIN_FILE_KEY + ". Keeping only the first 100 groups.");
351   }
352
353   private static Duplication crossProjectDuplication(TextBlock original, String otherFileKey, TextBlock duplicate) {
354     return new Duplication(original, Arrays.<Duplicate>asList(new CrossProjectDuplicate(otherFileKey, duplicate)));
355   }
356
357   private void assertNoDuplicationAdded(Component file) {
358     assertThat(duplicationRepository.getDuplications(file)).isEmpty();
359   }
360
361 }