/*
 * SonarQube
 * Copyright (C) 2009-2023 SonarSource SA
 * mailto:info AT sonarsource DOT com
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 */
package org.sonar.ce.task.projectanalysis.duplication;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.junit.Rule;
import org.junit.Test;
import org.slf4j.event.Level;
import org.sonar.api.config.internal.MapSettings;
import org.sonar.api.impl.utils.TestSystem2;
import org.sonar.api.testfixtures.log.LogTester;
import org.sonar.ce.task.log.CeTaskMessages;
import org.sonar.ce.task.projectanalysis.component.Component;
import org.sonar.ce.task.projectanalysis.component.FileAttributes;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;

import static com.google.common.base.Strings.padStart;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.apache.commons.lang.RandomStringUtils.randomAlphanumeric;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.sonar.ce.task.projectanalysis.component.Component.Type.FILE;
import static org.sonar.ce.task.projectanalysis.component.ReportComponent.builder;

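/**
 * Tests for {@link IntegrateCrossProjectDuplications}: checks how origin blocks are matched against
 * blocks coming from other projects, how the minimum-token threshold and the "100 references" /
 * "100 groups" limits are applied, and that enabling the deprecated cross-project feature is logged.
 */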
public class IntegrateCrossProjectDuplicationsTest {
  private static final String XOO_LANGUAGE = "xoo";
  private static final String ORIGIN_FILE_KEY = "ORIGIN_FILE_KEY";
  private static final Component ORIGIN_FILE = builder(FILE, 1)
    .setKey(ORIGIN_FILE_KEY)
    .setFileAttributes(new FileAttributes(false, XOO_LANGUAGE, 1))
    .build();
  private static final String OTHER_FILE_KEY = "OTHER_FILE_KEY";

  @Rule
  public LogTester logTester = new LogTester();
  @Rule
  public DuplicationRepositoryRule duplicationRepository = DuplicationRepositoryRule.create();

  private TestSystem2 system = new TestSystem2();
  private MapSettings settings = new MapSettings();
  private CeTaskMessages ceTaskMessages = mock(CeTaskMessages.class);
  private IntegrateCrossProjectDuplications underTest = new IntegrateCrossProjectDuplications(settings.asConfig(), duplicationRepository, ceTaskMessages, system);

  @Test
  public void add_duplications_from_two_blocks() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);

    // Two consecutive blocks of the origin file match two blocks of the other file
    Collection<Block> originBlocks = asList(
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 43)
        .setUnit(0, 5)
        .build(),
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("2b5747f0e4c59124"))
        .setIndexInFile(1)
        .setLines(32, 45)
        .setUnit(5, 20)
        .build());

    Collection<Block> duplicatedBlocks = asList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(40, 53)
        .build(),
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("2b5747f0e4c59124"))
        .setIndexInFile(1)
        .setLines(42, 55)
        .build());

    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);

    // The two matching blocks are merged into a single duplication covering the union of their line ranges
    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
      .containsExactly(
        crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
  }

  @Test
  public void add_duplications_from_a_single_block() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);

    Collection<Block> originBlocks = singletonList(
      // This block contains 11 tokens -> a duplication will be created
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 10)
        .build());

    Collection<Block> duplicatedBlocks = singletonList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());

    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);

    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
      .containsExactly(
        crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
  }

  @Test
  public void add_no_duplication_from_current_file() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);

    Collection<Block> originBlocks = asList(
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 10)
        .build(),
      // Duplicate block is in the same file
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(46, 60)
        .setUnit(0, 10)
        .build());

    Collection<Block> duplicatedBlocks = singletonList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ed"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());

    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);

    assertNoDuplicationAdded(ORIGIN_FILE);
  }

  @Test
  public void add_no_duplication_when_not_enough_tokens() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);

    Collection<Block> originBlocks = singletonList(
      // This block contains 5 tokens -> not enough to consider it as a duplication
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 4)
        .build());

    Collection<Block> duplicatedBlocks = singletonList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());

    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);

    assertNoDuplicationAdded(ORIGIN_FILE);
  }

  @Test
  public void add_no_duplication_when_no_duplicated_blocks() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);

    Collection<Block> originBlocks = singletonList(
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 10)
        .build());

    underTest.computeCpd(ORIGIN_FILE, originBlocks, Collections.emptyList());

    assertNoDuplicationAdded(ORIGIN_FILE);
  }

  @Test
  public void add_duplication_for_java_even_when_no_token() {
    Component javaFile = builder(FILE, 1)
      .setKey(ORIGIN_FILE_KEY)
      .setFileAttributes(new FileAttributes(false, "java", 10))
      .build();

    Collection<Block> originBlocks = singletonList(
      // This block contains no tokens -> for java files a duplication is still created
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 0)
        .build());

    Collection<Block> duplicatedBlocks = singletonList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());

    underTest.computeCpd(javaFile, originBlocks, duplicatedBlocks);

    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
      .containsExactly(
        crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
  }

  @Test
  public void default_minimum_tokens_is_one_hundred() {
    // No value is set -> the default minimum of 100 tokens applies
    settings.setProperty("sonar.cpd.xoo.minimumTokens", (Integer) null);

    Collection<Block> originBlocks = singletonList(
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 100)
        .build());

    Collection<Block> duplicatedBlocks = singletonList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());

    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);

    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
      .containsExactly(
        crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
  }

  @Test
  public void do_not_compute_more_than_one_hundred_duplications_when_too_many_duplicated_references() {
    Collection<Block> originBlocks = new ArrayList<>();
    Collection<Block> duplicatedBlocks = new ArrayList<>();

    Block.Builder blockBuilder = new Block.Builder()
      .setResourceId(ORIGIN_FILE_KEY)
      .setBlockHash(new ByteArray("a8998353e96320ec"))
      .setIndexInFile(0)
      .setLines(30, 45)
      .setUnit(0, 100);
    originBlocks.add(blockBuilder.build());

    // Generate more than 100 duplications of the same block
    for (int i = 0; i < 110; i++) {
      duplicatedBlocks.add(
        blockBuilder
          .setResourceId(randomAlphanumeric(16))
          .build());
    }

    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);

    assertThat(logTester.logs(Level.WARN)).containsOnly(
      "Too many duplication references on file " + ORIGIN_FILE_KEY + " for block at line 30. Keeping only the first 100 references.");
    Iterable<Duplication> duplications = duplicationRepository.getDuplications(ORIGIN_FILE);
    assertThat(duplications).hasSize(1);
    assertThat(duplications.iterator().next().getDuplicates()).hasSize(100);
  }

  @Test
  public void do_not_compute_more_than_one_hundred_duplications_when_too_many_duplications() {
    Collection<Block> originBlocks = new ArrayList<>();
    Collection<Block> duplicatedBlocks = new ArrayList<>();

    Block.Builder blockBuilder = new Block.Builder()
      .setIndexInFile(0)
      .setLines(30, 45)
      .setUnit(0, 100);

    // Generate more than 100 duplications on different files
    for (int i = 0; i < 110; i++) {
      String hash = padStart("hash" + i, 16, 'a');
      originBlocks.add(
        blockBuilder
          .setResourceId(ORIGIN_FILE_KEY)
          .setBlockHash(new ByteArray(hash))
          .build());
      duplicatedBlocks.add(
        blockBuilder
          .setResourceId("resource" + i)
          .setBlockHash(new ByteArray(hash))
          .build());
    }

    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);

    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE)).hasSize(100);
    assertThat(logTester.logs(Level.WARN)).containsOnly("Too many duplication groups on file " + ORIGIN_FILE_KEY + ". Keeping only the first 100 groups.");
  }

  @Test
  public void log_warning_if_this_deprecated_feature_is_enabled() {
    settings.setProperty("sonar.cpd.cross_project", "true");
    system.setNow(1000L);

    new IntegrateCrossProjectDuplications(settings.asConfig(), duplicationRepository, ceTaskMessages, system);

    assertThat(logTester.logs()).containsExactly("This analysis uses the deprecated cross-project duplication feature.");
    verify(ceTaskMessages).add(new CeTaskMessages.Message("This project uses the deprecated cross-project duplication feature.", 1000L));
  }

  private static Duplication crossProjectDuplication(TextBlock original, String otherFileKey, TextBlock duplicate) {
    return new Duplication(original, Arrays.asList(new CrossProjectDuplicate(otherFileKey, duplicate)));
  }

  private void assertNoDuplicationAdded(Component file) {
    assertThat(duplicationRepository.getDuplications(file)).isEmpty();
  }

}