return mapper(dbSession).selectProjectsByNameQuery(nameQueryForSql, includeModules);
}
+ /**
+ * Lists key/uuid pairs of the components of the specified merge branch which have issues to merge
+ * (delegates to the mapper query of the same name).
+ */
+ public List<KeyWithUuidDto> selectComponentKeysHavingIssuesToMerge(DbSession dbSession, String mergeBranchUuid) {
+ return mapper(dbSession).selectComponentKeysHavingIssuesToMerge(mergeBranchUuid);
+ }
+
+ /**
+ * Scroll all <strong>enabled</strong> files of the specified project (same project_uuid) in no specific order with
+ * 'SOURCE' source and a non-null path.
+ * <p>Each matching row is streamed to {@code handler} as a {@link FileMoveRowDto}.</p>
+ */
+ public void scrollAllFilesForFileMove(DbSession session, String projectUuid, ResultHandler<FileMoveRowDto> handler) {
+ mapper(session).scrollAllFilesForFileMove(projectUuid, handler);
+ }
+
/** Inserts the given component. Commit is the caller's responsibility (session is not committed here). */
public void insert(DbSession session, ComponentDto item) {
mapper(session).insert(item);
}
mapper(session).delete(componentId);
}
- public List<KeyWithUuidDto> selectComponentKeysHavingIssuesToMerge(DbSession dbSession, String mergeBranchUuid) {
- return mapper(dbSession).selectComponentKeysHavingIssuesToMerge(mergeBranchUuid);
- }
-
private static void checkThatNotTooManyComponents(ComponentQuery query) {
checkThatNotTooManyConditions(query.getComponentIds(), "Too many component ids in query");
checkThatNotTooManyConditions(query.getComponentKeys(), "Too many component keys in query");
void scrollForIndexing(@Param("projectUuid") @Nullable String projectUuid, ResultHandler<ComponentDto> handler);
+ /** Streams one {@code FileMoveRowDto} per enabled file/unit test of the project — see ComponentMapper.xml select of the same id. */
+ void scrollAllFilesForFileMove(@Param("projectUuid") String projectUuid, ResultHandler<FileMoveRowDto> handler);
+
void insert(ComponentDto componentDto);
void update(ComponentUpdateDto component);
--- /dev/null
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.db.component;
+
+/**
+ * One row of the "scrollAllFilesForFileMove" query: identity, key, uuid, path and line count of a file.
+ * Fields are populated by MyBatis through reflection (no setters), hence the read-only getters.
+ */
+public class FileMoveRowDto {
+ private long id;
+ // "kee" mirrors the KEE column name; exposed as getKey()
+ private String kee;
+ private String uuid;
+ private String path;
+ private int lineCount;
+
+ public long getId() {
+ return id;
+ }
+
+ public String getKey() {
+ return kee;
+ }
+
+ public String getUuid() {
+ return uuid;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ public int getLineCount() {
+ return lineCount;
+ }
+
+ @Override
+ public String toString() {
+ return "FileMoveRowDto{" +
+ "id=" + id +
+ ", kee='" + kee + '\'' +
+ ", uuid='" + uuid + '\'' +
+ ", path='" + path + '\'' +
+ ", lineCount=" + lineCount +
+ '}';
+ }
+}
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
+import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
import javax.annotation.CheckForNull;
import org.apache.commons.dbutils.DbUtils;
import org.apache.commons.io.IOUtils;
+import org.apache.ibatis.session.ResultHandler;
import org.sonar.db.Dao;
import org.sonar.db.DbSession;
import org.sonar.db.source.FileSourceDto.Type;
+import static org.sonar.db.DatabaseUtils.toUniqueAndSortedPartitions;
+
public class FileSourceDao implements Dao {
private static final Splitter END_OF_LINE_SPLITTER = Splitter.on('\n');
}
}
+ /**
+ * Scroll line hashes of all <strong>enabled</strong> components (should be files, but not enforced) with specified
+ * keys in no specific order with 'SOURCE' source and a non-null path.
+ * <p>Keys are deduplicated and split into partitions (see {@code DatabaseUtils#toUniqueAndSortedPartitions}),
+ * presumably to keep the size of the SQL IN clause bounded — one mapper call per partition.</p>
+ */
+ public void scrollLineHashes(DbSession dbSession, Collection<String> fileKeys, ResultHandler<LineHashesWithKeyDto> rowHandler) {
+ for (List<String> partition : toUniqueAndSortedPartitions(fileKeys)) {
+ mapper(dbSession).scrollLineHashes(partition, rowHandler);
+ }
+ }
+
public void readLineHashesStream(DbSession dbSession, String fileUuid, Consumer<Reader> consumer) {
Connection connection = dbSession.getConnection();
PreparedStatement pstmt = null;
*/
package org.sonar.db.source;
+import java.util.Collection;
import java.util.List;
import javax.annotation.CheckForNull;
import org.apache.ibatis.annotations.Param;
+import org.apache.ibatis.session.ResultHandler;
public interface FileSourceMapper {
@CheckForNull
FileSourceDto select(@Param("fileUuid") String fileUuid, @Param("dataType") String dataType);
+ /** Streams line hashes (with component key and path) of the files whose key is in {@code fileKeys} — see FileSourceMapper.xml. */
+ void scrollLineHashes(@Param("fileKeys") Collection<String> fileKeys, ResultHandler<LineHashesWithKeyDto> rowHandler);
+
@CheckForNull
Integer selectLineHashesVersion(@Param("fileUuid") String fileUuid, @Param("dataType") String dataType);
--- /dev/null
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.db.source;
+
+import java.util.Collections;
+import java.util.List;
+import javax.annotation.Nullable;
+
+import static org.sonar.db.source.FileSourceDto.LINES_HASHES_SPLITTER;
+
+/**
+ * One row of the "scrollLineHashes" query: component key, path and the raw (newline-joined) line hashes.
+ * Populated by MyBatis; {@link #getLineHashes()} exposes the hashes as a parsed list.
+ */
+public class LineHashesWithKeyDto {
+ // "kee" mirrors the KEE column name; exposed as getKey()
+ private String kee;
+ private String path;
+ private String lineHashes;
+
+ public String getKey() {
+ return kee;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ /** Used by MyBatis */
+ public String getRawLineHashes() {
+ return lineHashes;
+ }
+
+ /** Used by MyBatis */
+ public void setRawLineHashes(@Nullable String lineHashes) {
+ this.lineHashes = lineHashes;
+ }
+
+ /** Splits the raw hashes into one entry per line; never returns null (empty list when no hashes are stored). */
+ public List<String> getLineHashes() {
+ if (lineHashes == null) {
+ return Collections.emptyList();
+ }
+ return LINES_HASHES_SPLITTER.splitToList(lineHashes);
+ }
+}
</if>
</select>
+ <!-- Enabled FIL-scope components (qualifier FIL or UTS) of the project which have a 'SOURCE'
+      file_sources row and a non-null path; streamed forward-only for file-move detection -->
+ <select id="scrollAllFilesForFileMove" parameterType="map" resultType="org.sonar.db.component.FileMoveRowDto" fetchSize="${_scrollFetchSize}" resultSetType="FORWARD_ONLY">
+ select
+ p.id,
+ p.uuid as uuid,
+ p.kee as kee,
+ p.path as path,
+ fs.line_count as lineCount
+ from projects p
+ inner join file_sources fs on
+ fs.file_uuid = p.uuid
+ and fs.data_type = 'SOURCE'
+ where
+ p.project_uuid = #{projectUuid,jdbcType=VARCHAR}
+ and p.enabled = ${_true}
+ and p.scope = 'FIL'
+ and p.qualifier in ('FIL', 'UTS')
+ and p.path is not null
+ </select>
+
<select id="selectProjectsByNameQuery" resultType="Component">
select
<include refid="componentColumns"/>
</select>
<!-- Hash-related columns of every file_sources row of a project for the given data type -->
<select id="selectHashesForProject" parameterType="map" resultType="org.sonar.db.source.FileSourceDto">
- SELECT id, file_uuid as fileUuid, data_hash as dataHash, src_hash as srcHash, revision, updated_at as updatedAt
- FROM file_sources
- WHERE project_uuid = #{projectUuid} and data_type=#{dataType}
+ select
+ id,
+ file_uuid as fileUuid,
+ data_hash as dataHash,
+ src_hash as srcHash,
+ revision,
+ updated_at as updatedAt
+ from
+ file_sources
+ where
+ project_uuid = #{projectUuid}
+ and data_type=#{dataType}
+ </select>
+
+ <!-- Line hashes (with key and path) of components whose key is in fileKeys and whose path is
+      non-null, joined to their 'SOURCE' file_sources row; streamed forward-only -->
+ <select id="scrollLineHashes" parameterType="map" resultType="org.sonar.db.source.LineHashesWithKeyDto" fetchSize="${_scrollFetchSize}" resultSetType="FORWARD_ONLY">
+ select
+ p.kee as kee,
+ p.path as path,
+ fs.line_hashes as rawLineHashes
+ from projects p
+ inner join file_sources fs on
+ fs.file_uuid = p.uuid
+ and fs.data_type = 'SOURCE'
+ where
+ p.kee in
+ <foreach collection="fileKeys" item="fileKey" open="(" close=")" separator=",">
+ #{fileKey,jdbcType=VARCHAR}
+ </foreach>
+ and p.path is not null
</select>
<select id="selectLineHashesVersion" parameterType="map" resultType="Integer">
--- /dev/null
+/*
+ * SonarQube
+ * Copyright (C) 2009-2018 SonarSource SA
+ * mailto:info AT sonarsource DOT com
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+package org.sonar.db.component;
+
+import com.tngtech.java.junit.dataprovider.DataProvider;
+import com.tngtech.java.junit.dataprovider.DataProviderRunner;
+import com.tngtech.java.junit.dataprovider.UseDataProvider;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import org.apache.ibatis.session.ResultContext;
+import org.apache.ibatis.session.ResultHandler;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.sonar.api.utils.System2;
+import org.sonar.db.DbSession;
+import org.sonar.db.DbTester;
+import org.sonar.db.organization.OrganizationDto;
+import org.sonar.db.source.FileSourceDto;
+
+import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.fail;
+import static org.sonar.api.resources.Qualifiers.FILE;
+import static org.sonar.api.resources.Qualifiers.UNIT_TEST_FILE;
+
+/**
+ * Tests of {@code ComponentDao#scrollAllFilesForFileMove(DbSession, String, ResultHandler)}: only enabled
+ * files and unit tests of the target project (or branch) with a 'SOURCE' file source and a non-null path
+ * must be scrolled.
+ */
+@RunWith(DataProviderRunner.class)
+public class ScrollForFileMoveComponentDaoTest {
+  @Rule
+  public DbTester db = DbTester.create(System2.INSTANCE);
+
+  private Random random = new Random();
+  private DbSession dbSession = db.getSession();
+  private ComponentDao underTest = new ComponentDao();
+
+  @Test
+  public void scrollAllFilesForFileMove_has_no_effect_if_project_does_not_exist() {
+    String nonExistingProjectUuid = randomAlphabetic(10);
+
+    underTest.scrollAllFilesForFileMove(dbSession, nonExistingProjectUuid, resultContext -> fail("handler should not be called"));
+  }
+
+  @Test
+  public void scrollAllFilesForFileMove_has_no_effect_if_project_has_no_file() {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+
+    underTest.scrollAllFilesForFileMove(dbSession, project.uuid(), resultContext -> fail("handler should not be called"));
+  }
+
+  @Test
+  public void scrollAllFilesForFileMove_ignores_files_with_null_path() {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+    ComponentAndSource file = insertFileAndSource(project, FILE);
+    ComponentAndSource ut = insertFileAndSource(project, UNIT_TEST_FILE);
+    ComponentDto fileNoPath = db.components().insertComponent(ComponentTesting.newFileDto(project).setPath(null).setQualifier(FILE));
+    db.fileSources().insertFileSource(fileNoPath);
+    ComponentDto utNoPath = db.components().insertComponent(ComponentTesting.newFileDto(project).setPath(null).setQualifier(UNIT_TEST_FILE));
+    db.fileSources().insertFileSource(utNoPath);
+    RecordingResultHandler resultHandler = new RecordingResultHandler();
+
+    underTest.scrollAllFilesForFileMove(dbSession, project.uuid(), resultHandler);
+
+    assertThat(resultHandler.dtos).hasSize(2);
+    verifyFileMoveRowDto(resultHandler, file);
+    verifyFileMoveRowDto(resultHandler, ut);
+  }
+
+  @Test
+  public void scrollAllFilesForFileMove_ignores_files_without_source() {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+    ComponentAndSource file = insertFileAndSource(project, FILE);
+    ComponentAndSource ut = insertFileAndSource(project, UNIT_TEST_FILE);
+    // keep the default (non-null) path on these components so that the missing file_sources row is
+    // the ONLY reason for exclusion — a null path is already covered by ignores_files_with_null_path
+    ComponentDto fileNoSource = db.components().insertComponent(ComponentTesting.newFileDto(project).setQualifier(FILE));
+    ComponentDto utNoSource = db.components().insertComponent(ComponentTesting.newFileDto(project).setQualifier(UNIT_TEST_FILE));
+    RecordingResultHandler resultHandler = new RecordingResultHandler();
+
+    underTest.scrollAllFilesForFileMove(dbSession, project.uuid(), resultHandler);
+
+    assertThat(resultHandler.dtos).hasSize(2);
+    verifyFileMoveRowDto(resultHandler, file);
+    verifyFileMoveRowDto(resultHandler, ut);
+  }
+
+  @Test
+  public void scrollAllFilesForFileMove_scrolls_files_of_project() {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+    ComponentDto module1 = db.components().insertComponent(ComponentTesting.newModuleDto(project));
+    ComponentDto module2 = db.components().insertComponent(ComponentTesting.newModuleDto(module1));
+    ComponentAndSource file1 = insertFileAndSource(project, FILE);
+    ComponentAndSource file2 = insertFileAndSource(module1, FILE);
+    ComponentAndSource file3 = insertFileAndSource(module2, FILE);
+    RecordingResultHandler resultHandler = new RecordingResultHandler();
+
+    underTest.scrollAllFilesForFileMove(dbSession, project.uuid(), resultHandler);
+
+    assertThat(resultHandler.dtos).hasSize(3);
+    verifyFileMoveRowDto(resultHandler, file1);
+    verifyFileMoveRowDto(resultHandler, file2);
+    verifyFileMoveRowDto(resultHandler, file3);
+  }
+
+  @Test
+  public void scrollAllFilesForFileMove_ignores_file_source_of_type_TEST() {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+    ComponentDto module1 = db.components().insertComponent(ComponentTesting.newModuleDto(project));
+    ComponentDto module2 = db.components().insertComponent(ComponentTesting.newModuleDto(module1));
+    ComponentAndSource file1 = insertFileAndSource(project, FILE);
+    // any data_type other than 'SOURCE' must be ignored; the qualifier constant is merely a convenient non-'SOURCE' value here
+    ComponentDto file2 = db.components().insertComponent(ComponentTesting.newFileDto(module1).setQualifier(UNIT_TEST_FILE));
+    db.fileSources().insertFileSource(file2, t -> t.setDataType(UNIT_TEST_FILE));
+    ComponentAndSource file3 = insertFileAndSource(module2, FILE);
+    db.fileSources().insertFileSource(file3.component, t -> t.setDataType(UNIT_TEST_FILE));
+    RecordingResultHandler resultHandler = new RecordingResultHandler();
+
+    underTest.scrollAllFilesForFileMove(dbSession, project.uuid(), resultHandler);
+
+    assertThat(resultHandler.dtos).hasSize(2);
+    verifyFileMoveRowDto(resultHandler, file1);
+    verifyFileMoveRowDto(resultHandler, file3);
+  }
+
+  @Test
+  public void scrollAllFilesForFileMove_scrolls_large_number_of_files_and_uts() {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+    // more rows than a single fetch batch, to exercise the scrolling itself
+    List<ComponentAndSource> files = IntStream.range(0, 300 + random.nextInt(500))
+      .mapToObj(i -> {
+        String qualifier = random.nextBoolean() ? FILE : UNIT_TEST_FILE;
+        ComponentDto file = db.components().insertComponent(ComponentTesting.newFileDto(project).setDbKey("f_" + i).setQualifier(qualifier));
+        FileSourceDto fileSource = db.fileSources().insertFileSource(file);
+        return new ComponentAndSource(file, fileSource);
+      })
+      .collect(Collectors.toList());
+    RecordingResultHandler resultHandler = new RecordingResultHandler();
+
+    underTest.scrollAllFilesForFileMove(dbSession, project.uuid(), resultHandler);
+
+    assertThat(resultHandler.dtos).hasSize(files.size());
+    files.forEach(f -> verifyFileMoveRowDto(resultHandler, f));
+  }
+
+  @Test
+  public void scrollAllFilesForFileMove_scrolls_unit_tests_of_project() {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+    ComponentAndSource ut = insertFileAndSource(project, UNIT_TEST_FILE);
+    RecordingResultHandler resultHandler = new RecordingResultHandler();
+
+    underTest.scrollAllFilesForFileMove(dbSession, project.uuid(), resultHandler);
+
+    assertThat(resultHandler.dtos).hasSize(1);
+    verifyFileMoveRowDto(resultHandler, ut);
+  }
+
+  @Test
+  @UseDataProvider("branchTypes")
+  public void scrollAllFilesForFileMove_scrolls_files_and_unit_tests_of_branch(BranchType branchType) {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+    ComponentDto branch = db.components().insertProjectBranch(project, t -> t.setBranchType(branchType));
+    ComponentAndSource file = insertFileAndSource(branch, FILE);
+    ComponentAndSource ut = insertFileAndSource(branch, UNIT_TEST_FILE);
+    RecordingResultHandler resultHandler = new RecordingResultHandler();
+
+    underTest.scrollAllFilesForFileMove(dbSession, branch.uuid(), resultHandler);
+
+    assertThat(resultHandler.dtos).hasSize(2);
+    verifyFileMoveRowDto(resultHandler, file);
+    verifyFileMoveRowDto(resultHandler, ut);
+  }
+
+  @DataProvider
+  public static Object[][] branchTypes() {
+    return new Object[][] {
+      {BranchType.LONG},
+      {BranchType.SHORT},
+      {BranchType.PULL_REQUEST}
+    };
+  }
+
+  @Test
+  public void scrollAllFilesForFileMove_ignores_non_file_and_non_ut_component_with_source() {
+    OrganizationDto organization = db.organizations().insert();
+    ComponentDto project = insertProjectWithRandomVisibility(organization);
+    db.fileSources().insertFileSource(project);
+    ComponentDto module = db.components().insertComponent(ComponentTesting.newModuleDto(project));
+    db.fileSources().insertFileSource(module);
+    ComponentDto dir = db.components().insertComponent(ComponentTesting.newDirectory(module, "foo"));
+    db.fileSources().insertFileSource(dir);
+    ComponentAndSource file = insertFileAndSource(module, FILE);
+    ComponentAndSource ut = insertFileAndSource(dir, UNIT_TEST_FILE);
+    ComponentDto portfolio = random.nextBoolean() ? db.components().insertPublicPortfolio(organization) : db.components().insertPrivatePortfolio(organization);
+    db.fileSources().insertFileSource(portfolio);
+    ComponentDto subView = db.components().insertSubView(portfolio);
+    db.fileSources().insertFileSource(subView);
+    ComponentDto application = random.nextBoolean() ? db.components().insertPrivateApplication(organization) : db.components().insertPublicApplication(organization);
+    db.fileSources().insertFileSource(application);
+    RecordingResultHandler resultHandler = new RecordingResultHandler();
+
+    // a single handler accumulates across the three scrolls: only the two real files may show up
+    underTest.scrollAllFilesForFileMove(dbSession, project.uuid(), resultHandler);
+    underTest.scrollAllFilesForFileMove(dbSession, portfolio.uuid(), resultHandler);
+    underTest.scrollAllFilesForFileMove(dbSession, application.uuid(), resultHandler);
+
+    assertThat(resultHandler.dtos).hasSize(2);
+    verifyFileMoveRowDto(resultHandler, file);
+    verifyFileMoveRowDto(resultHandler, ut);
+  }
+
+  /** Records every scrolled row so tests can assert on the exact set of rows received. */
+  private static final class RecordingResultHandler implements ResultHandler<FileMoveRowDto> {
+    List<FileMoveRowDto> dtos = new ArrayList<>();
+
+    @Override
+    public void handleResult(ResultContext<? extends FileMoveRowDto> resultContext) {
+      dtos.add(resultContext.getResultObject());
+    }
+
+    private java.util.Optional<FileMoveRowDto> getById(long id) {
+      return dtos.stream().filter(t -> t.getId() == id).findAny();
+    }
+  }
+
+  /** Inserts either a private or a public project at random: visibility must not matter to the scrolled rows. */
+  private ComponentDto insertProjectWithRandomVisibility(OrganizationDto organization) {
+    return random.nextBoolean() ? db.components().insertPrivateProject(organization) : db.components().insertPublicProject(organization);
+  }
+
+  /** Inserts a file component with the given qualifier together with its 'SOURCE' file source. */
+  private ComponentAndSource insertFileAndSource(ComponentDto parent, String qualifier) {
+    ComponentDto file = db.components().insertComponent(ComponentTesting.newFileDto(parent).setQualifier(qualifier));
+    FileSourceDto fileSource = db.fileSources().insertFileSource(file);
+    return new ComponentAndSource(file, fileSource);
+  }
+
+  /** Pairs a component with the file source that was inserted for it. */
+  private static final class ComponentAndSource {
+    private final ComponentDto component;
+    private final FileSourceDto source;
+
+    private ComponentAndSource(ComponentDto component, FileSourceDto source) {
+      this.component = component;
+      this.source = source;
+    }
+  }
+
+  /** Asserts that the scrolled row for the component carries its key, uuid, path and line count. */
+  private static void verifyFileMoveRowDto(RecordingResultHandler resultHandler, ComponentAndSource componentAndSource) {
+    FileMoveRowDto dto = resultHandler.getById(componentAndSource.component.getId()).get();
+    assertThat(dto.getKey()).isEqualTo(componentAndSource.component.getDbKey());
+    assertThat(dto.getUuid()).isEqualTo(componentAndSource.component.uuid());
+    assertThat(dto.getPath()).isEqualTo(componentAndSource.component.path());
+    assertThat(dto.getLineCount()).isEqualTo(componentAndSource.source.getLineCount());
+  }
+}
package org.sonar.db.source;
import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.Random;
import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
import javax.annotation.Nullable;
import org.apache.commons.io.IOUtils;
+import org.apache.ibatis.session.ResultContext;
+import org.apache.ibatis.session.ResultHandler;
import org.junit.Rule;
import org.junit.Test;
import org.sonar.api.utils.System2;
import org.sonar.db.DbSession;
import org.sonar.db.DbTester;
+import org.sonar.db.component.ComponentDto;
+import org.sonar.db.organization.OrganizationDto;
import org.sonar.db.source.FileSourceDto.Type;
import static com.google.common.collect.ImmutableList.of;
import static java.util.Collections.emptyList;
+import static java.util.Collections.emptySet;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
+import static org.sonar.db.component.ComponentTesting.newFileDto;
public class FileSourceDaoTest {
@Rule
public DbTester dbTester = DbTester.create(System2.INSTANCE);
- private DbSession session = dbTester.getSession();
+ private DbSession dbSession = dbTester.getSession();
private FileSourceDao underTest = dbTester.getDbClient().fileSourceDao();
public void select() {
dbTester.prepareDbUnit(getClass(), "shared.xml");
- FileSourceDto fileSourceDto = underTest.selectSourceByFileUuid(session, "FILE1_UUID");
+ FileSourceDto fileSourceDto = underTest.selectSourceByFileUuid(dbSession, "FILE1_UUID");
assertThat(fileSourceDto.getBinaryData()).isNotEmpty();
assertThat(fileSourceDto.getDataHash()).isEqualTo("hash");
dbTester.prepareDbUnit(getClass(), "shared.xml");
ReaderToStringConsumer fn = new ReaderToStringConsumer();
- underTest.readLineHashesStream(dbTester.getSession(), "FILE1_UUID", fn);
+ underTest.readLineHashesStream(dbSession, "FILE1_UUID", fn);
assertThat(fn.result).isEqualTo("ABC\\nDEF\\nGHI");
}
dbTester.prepareDbUnit(getClass(), "shared.xml");
ReaderToStringConsumer fn = new ReaderToStringConsumer();
- underTest.readLineHashesStream(dbTester.getSession(), "unknown", fn);
+ underTest.readLineHashesStream(dbSession, "unknown", fn);
assertThat(fn.result).isNull();
}
dbTester.prepareDbUnit(getClass(), "no_line_hashes_when_only_test_data.xml");
ReaderToStringConsumer fn = new ReaderToStringConsumer();
- underTest.readLineHashesStream(dbTester.getSession(), "FILE1_UUID", fn);
+ underTest.readLineHashesStream(dbSession, "FILE1_UUID", fn);
assertThat(fn.result).isNull();
}
.setUpdatedAt(1500000000001L)
.setLineHashesVersion(1)
.setRevision("123456789");
- underTest.insert(session, expected);
- session.commit();
+ underTest.insert(dbSession, expected);
+ dbSession.commit();
- FileSourceDto fileSourceDto = underTest.selectSourceByFileUuid(session, expected.getFileUuid());
+ FileSourceDto fileSourceDto = underTest.selectSourceByFileUuid(dbSession, expected.getFileUuid());
assertThat(fileSourceDto.getProjectUuid()).isEqualTo(expected.getProjectUuid());
assertThat(fileSourceDto.getFileUuid()).isEqualTo(expected.getFileUuid());
.setFileUuid("Bar")
.setCreatedAt(1500000000000L)
.setUpdatedAt(1500000000001L);
- underTest.insert(session, fileSourceDto);
- session.commit();
+ underTest.insert(dbSession, fileSourceDto);
+ dbSession.commit();
}
@Test
.setDataType(Type.SOURCE)
.setCreatedAt(1500000000000L)
.setUpdatedAt(1500000000001L);
- underTest.insert(session, fileSourceDto);
- session.commit();
+ underTest.insert(dbSession, fileSourceDto);
+ dbSession.commit();
- FileSourceDto res = underTest.selectSourceByFileUuid(session, fileSourceDto.getFileUuid());
+ FileSourceDto res = underTest.selectSourceByFileUuid(dbSession, fileSourceDto.getFileUuid());
assertThat(res.getLineCount()).isEqualTo(0);
assertThat(res.getLineHashes()).isEmpty();
.setDataType(Type.TEST)
.setCreatedAt(1500000000000L)
.setUpdatedAt(1500000000001L);
- underTest.insert(session, fileSourceDto);
- session.commit();
+ underTest.insert(dbSession, fileSourceDto);
+ dbSession.commit();
- FileSourceDto res = underTest.selectTestByFileUuid(session, fileSourceDto.getFileUuid());
+ FileSourceDto res = underTest.selectTestByFileUuid(dbSession, fileSourceDto.getFileUuid());
assertThat(res.getLineCount()).isEqualTo(0);
assertThat(res.getLineHashes()).isEmpty();
public void selectLineHashes_does_not_fail_when_lineshashes_is_null() {
dbTester.prepareDbUnit(getClass(), "shared.xml");
- underTest.insert(session, new FileSourceDto()
+ underTest.insert(dbSession, new FileSourceDto()
.setProjectUuid("PRJ_UUID")
.setFileUuid("FILE2_UUID")
.setBinaryData("FILE2_BINARY_DATA".getBytes())
.setCreatedAt(1500000000000L)
.setUpdatedAt(1500000000001L)
.setRevision("123456789"));
- session.commit();
+ dbSession.commit();
- assertThat(underTest.selectLineHashes(dbTester.getSession(), "FILE2_UUID")).isEmpty();
+ assertThat(underTest.selectLineHashes(dbSession, "FILE2_UUID")).isEmpty();
}
@Test
public void selectLineHashesVersion_returns_without_significant_code_by_default() {
- underTest.insert(session, new FileSourceDto()
+ underTest.insert(dbSession, new FileSourceDto()
.setProjectUuid("PRJ_UUID")
.setFileUuid("FILE2_UUID")
.setBinaryData("FILE2_BINARY_DATA".getBytes())
.setCreatedAt(1500000000000L)
.setUpdatedAt(1500000000001L)
.setRevision("123456789"));
- session.commit();
+ dbSession.commit();
- assertThat(underTest.selectLineHashesVersion(dbTester.getSession(), "FILE2_UUID")).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE);
+ assertThat(underTest.selectLineHashesVersion(dbSession, "FILE2_UUID")).isEqualTo(LineHashVersion.WITHOUT_SIGNIFICANT_CODE);
}
@Test
public void selectLineHashesVersion_succeeds() {
- underTest.insert(session, new FileSourceDto()
+ underTest.insert(dbSession, new FileSourceDto()
.setProjectUuid("PRJ_UUID")
.setFileUuid("FILE2_UUID")
.setBinaryData("FILE2_BINARY_DATA".getBytes())
.setUpdatedAt(1500000000001L)
.setLineHashesVersion(1)
.setRevision("123456789"));
- session.commit();
+ dbSession.commit();
- assertThat(underTest.selectLineHashesVersion(dbTester.getSession(), "FILE2_UUID")).isEqualTo(LineHashVersion.WITH_SIGNIFICANT_CODE);
+ assertThat(underTest.selectLineHashesVersion(dbSession, "FILE2_UUID")).isEqualTo(LineHashVersion.WITH_SIGNIFICANT_CODE);
}
@Test
public void readLineHashesStream_does_not_fail_when_lineshashes_is_null() {
dbTester.prepareDbUnit(getClass(), "shared.xml");
- underTest.insert(session, new FileSourceDto()
+ underTest.insert(dbSession, new FileSourceDto()
.setProjectUuid("PRJ_UUID")
.setFileUuid("FILE2_UUID")
.setBinaryData("FILE2_BINARY_DATA".getBytes())
.setCreatedAt(1500000000000L)
.setUpdatedAt(1500000000001L)
.setRevision("123456789"));
- session.commit();
+ dbSession.commit();
boolean[] flag = {false};
- underTest.readLineHashesStream(dbTester.getSession(), "FILE2_UUID", new Consumer<Reader>() {
+ underTest.readLineHashesStream(dbSession, "FILE2_UUID", new Consumer<Reader>() {
@Override
public void accept(@Nullable Reader input) {
fail("function must never been called since there is no data to read");
assertThat(flag[0]).isFalse();
}
+ @Test
+ public void scrollLineHashes_has_no_effect_if_no_keys() {
+ // an empty key set must short-circuit: no SQL is scrolled, so the handler is never invoked
+ underTest.scrollLineHashes(dbSession, emptySet(), resultContext -> fail("handler should not be called"));
+ }
+
+ @Test
+ public void scrollLineHashes_scrolls_hashes_of_specific_keys() {
+ OrganizationDto organization = dbTester.organizations().insert();
+ // project visibility should not affect the scroll, so pick private/public at random
+ ComponentDto project = new Random().nextBoolean() ? dbTester.components().insertPrivateProject(organization) : dbTester.components().insertPublicProject(organization);
+ ComponentDto file1 = dbTester.components().insertComponent(newFileDto(project));
+ FileSourceDto fileSource1 = dbTester.fileSources().insertFileSource(file1);
+ ComponentDto file2 = dbTester.components().insertComponent(newFileDto(project));
+ FileSourceDto fileSource2 = dbTester.fileSources().insertFileSource(file2);
+ ComponentDto file3 = dbTester.components().insertComponent(newFileDto(project));
+ FileSourceDto fileSource3 = dbTester.fileSources().insertFileSource(file3);
+
+ // scrolling a single key yields exactly that file's hashes
+ LineHashesWithKeyDtoHandler handler = scrollLineHashes(file1.getDbKey());
+ assertThat(handler.dtos).hasSize(1);
+ verifyLinesHashes(handler, file1, fileSource1);
+
+ handler = scrollLineHashes(file2.getDbKey());
+ assertThat(handler.dtos).hasSize(1);
+ verifyLinesHashes(handler, file2, fileSource2);
+
+ // scrolling several keys yields one row per key (order of keys does not matter)
+ handler = scrollLineHashes(file2.getDbKey(), file1.getDbKey(), file3.getDbKey());
+ assertThat(handler.dtos).hasSize(3);
+ verifyLinesHashes(handler, file1, fileSource1);
+ verifyLinesHashes(handler, file2, fileSource2);
+ verifyLinesHashes(handler, file3, fileSource3);
+ }
+
+ @Test
+ public void scrollLineHashes_does_not_scroll_hashes_of_component_without_path() {
+ OrganizationDto organization = dbTester.organizations().insert();
+ // project visibility should not affect the scroll, so pick private/public at random
+ ComponentDto project = new Random().nextBoolean() ? dbTester.components().insertPrivateProject(organization) : dbTester.components().insertPublicProject(organization);
+ ComponentDto file1 = dbTester.components().insertComponent(newFileDto(project));
+ FileSourceDto fileSource1 = dbTester.fileSources().insertFileSource(file1);
+ // file2 has a null path: its file source exists but must be filtered out of the scroll
+ ComponentDto file2 = dbTester.components().insertComponent(newFileDto(project).setPath(null));
+ dbTester.fileSources().insertFileSource(file2);
+
+ LineHashesWithKeyDtoHandler handler = scrollLineHashes(file2.getDbKey(), file1.getDbKey());
+ assertThat(handler.dtos).hasSize(1);
+ verifyLinesHashes(handler, file1, fileSource1);
+ }
+
+ @Test
+ public void scrollLineHashes_does_not_scroll_hashes_of_component_with_TEST_source() {
+ OrganizationDto organization = dbTester.organizations().insert();
+ // project visibility should not affect the scroll, so pick private/public at random
+ ComponentDto project = new Random().nextBoolean() ? dbTester.components().insertPrivateProject(organization) : dbTester.components().insertPublicProject(organization);
+ ComponentDto file1 = dbTester.components().insertComponent(newFileDto(project));
+ FileSourceDto fileSource1 = dbTester.fileSources().insertFileSource(file1);
+ // file2 only has a TEST file source: it must be filtered out of the scroll
+ ComponentDto file2 = dbTester.components().insertComponent(newFileDto(project));
+ dbTester.fileSources().insertFileSource(file2, t -> t.setDataType(Type.TEST));
+ // file3 has both SOURCE and TEST rows: only the SOURCE row must be scrolled
+ ComponentDto file3 = dbTester.components().insertComponent(newFileDto(project));
+ FileSourceDto fileSource3 = dbTester.fileSources().insertFileSource(file3, t -> t.setDataType(Type.SOURCE));
+ dbTester.fileSources().insertFileSource(file3, t -> t.setDataType(Type.TEST));
+
+ LineHashesWithKeyDtoHandler handler = scrollLineHashes(file2.getDbKey(), file1.getDbKey(), file3.getDbKey());
+ assertThat(handler.dtos).hasSize(2);
+ verifyLinesHashes(handler, file1, fileSource1);
+ verifyLinesHashes(handler, file3, fileSource3);
+ }
+
+ @Test
+ public void scrollLineHashes_handles_scrolling_more_than_1000_files() {
+ OrganizationDto organization = dbTester.organizations().insert();
+ ComponentDto project = new Random().nextBoolean() ? dbTester.components().insertPrivateProject(organization) : dbTester.components().insertPublicProject(organization);
+ // 1001..1005 files: presumably exercises partitioning of the key set into chunks
+ // (e.g. the 1000-element IN-clause limit on some DBs) — TODO confirm against the DAO implementation
+ List<ComponentDto> files = IntStream.range(0, 1001 + new Random().nextInt(5))
+ .mapToObj(i -> {
+ ComponentDto file = dbTester.components().insertComponent(newFileDto(project));
+ dbTester.fileSources().insertFileSource(file);
+ return file;
+ })
+ .collect(Collectors.toList());
+
+ LineHashesWithKeyDtoHandler handler = new LineHashesWithKeyDtoHandler();
+ underTest.scrollLineHashes(dbSession, files.stream().map(ComponentDto::getDbKey).collect(Collectors.toSet()), handler);
+
+ // every inserted file must come back exactly once
+ assertThat(handler.dtos).hasSize(files.size());
+ files.forEach(t -> assertThat(handler.getByKey(t.getDbKey())).isPresent());
+ }
+
+ /**
+ * Scrolls line hashes for the given component keys and returns the collecting handler.
+ */
+ private LineHashesWithKeyDtoHandler scrollLineHashes(String... keys) {
+ LineHashesWithKeyDtoHandler handler = new LineHashesWithKeyDtoHandler();
+ underTest.scrollLineHashes(dbSession, ImmutableSet.copyOf(keys), handler);
+ return handler;
+ }
+
+ /**
+ * Asserts that the handler scrolled a row for {@code file} whose path and hashes match {@code fileSource}.
+ */
+ private static void verifyLinesHashes(LineHashesWithKeyDtoHandler handler, ComponentDto file, FileSourceDto fileSource) {
+ LineHashesWithKeyDto dto = handler.getByKey(file.getDbKey()).get();
+ assertThat(dto.getPath()).isEqualTo(file.path());
+ assertThat(dto.getRawLineHashes()).isEqualTo(fileSource.getRawLineHashes());
+ assertThat(dto.getLineHashes()).isEqualTo(fileSource.getLineHashes());
+ }
+
+ /** Collects every scrolled row so tests can assert on the full result set afterwards. */
+ private static final class LineHashesWithKeyDtoHandler implements ResultHandler<LineHashesWithKeyDto> {
+ private final List<LineHashesWithKeyDto> dtos = new ArrayList<>();
+
+ @Override
+ public void handleResult(ResultContext<? extends LineHashesWithKeyDto> resultContext) {
+ dtos.add(resultContext.getResultObject());
+ }
+
+ // returns the first collected row matching the component key, if any
+ public Optional<LineHashesWithKeyDto> getByKey(String key) {
+ return dtos.stream()
+ .filter(t -> key.equals(t.getKey()))
+ .findAny();
+ }
+ }
+
@Test
public void update() {
dbTester.prepareDbUnit(getClass(), "shared.xml");
- underTest.update(session, new FileSourceDto()
+ underTest.update(dbSession, new FileSourceDto()
.setId(101L)
.setProjectUuid("PRJ_UUID")
.setFileUuid("FILE1_UUID")
.setUpdatedAt(1500000000002L)
.setLineHashesVersion(1)
.setRevision("987654321"));
- session.commit();
+ dbSession.commit();
dbTester.assertDbUnitTable(getClass(), "update-result.xml", "file_sources", "project_uuid", "file_uuid",
"data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type", "revision", "line_hashes_version");
.setLineHashes(lineHashes)
.setCreatedAt(1500000000000L)
.setUpdatedAt(1500000000001L);
- underTest.insert(session, fileSourceDto);
- session.commit();
+ underTest.insert(dbSession, fileSourceDto);
+ dbSession.commit();
- FileSourceDto resBefore = underTest.selectSourceByFileUuid(session, fileSourceDto.getFileUuid());
+ FileSourceDto resBefore = underTest.selectSourceByFileUuid(dbSession, fileSourceDto.getFileUuid());
assertThat(resBefore.getLineCount()).isEqualTo(lineHashes.size());
assertThat(resBefore.getLineHashes()).isEqualTo(lineHashes);
fileSourceDto.setId(resBefore.getId());
fileSourceDto.setLineHashes(emptyList());
- underTest.update(session, fileSourceDto);
- session.commit();
+ underTest.update(dbSession, fileSourceDto);
+ dbSession.commit();
- FileSourceDto res = underTest.selectSourceByFileUuid(session, fileSourceDto.getFileUuid());
+ FileSourceDto res = underTest.selectSourceByFileUuid(dbSession, fileSourceDto.getFileUuid());
assertThat(res.getLineHashes()).isEmpty();
assertThat(res.getLineCount()).isEqualTo(1);
}
import org.sonar.server.computation.task.projectanalysis.event.EventRepositoryImpl;
import org.sonar.server.computation.task.projectanalysis.filemove.FileSimilarityImpl;
import org.sonar.server.computation.task.projectanalysis.filemove.MutableMovedFilesRepositoryImpl;
-import org.sonar.server.computation.task.projectanalysis.filemove.ScoreMatrixDumper;
+import org.sonar.server.computation.task.projectanalysis.filemove.ScoreMatrixDumperImpl;
import org.sonar.server.computation.task.projectanalysis.filemove.SourceSimilarityImpl;
import org.sonar.server.computation.task.projectanalysis.filesystem.ComputationTempFolderProvider;
import org.sonar.server.computation.task.projectanalysis.issue.BaseIssuesLoader;
ShortBranchIssueMerger.class,
// filemove
- ScoreMatrixDumper.class,
+ ScoreMatrixDumperImpl.class,
SourceSimilarityImpl.class,
FileSimilarityImpl.class,
MutableMovedFilesRepositoryImpl.class,
package org.sonar.server.computation.task.projectanalysis.filemove;
import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import javax.annotation.CheckForNull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
-import org.sonar.api.resources.Qualifiers;
+import org.apache.ibatis.session.ResultContext;
+import org.apache.ibatis.session.ResultHandler;
import org.sonar.api.utils.log.Logger;
import org.sonar.api.utils.log.Loggers;
import org.sonar.core.util.logs.Profiler;
+import org.sonar.core.util.stream.MoreCollectors;
import org.sonar.db.DbClient;
import org.sonar.db.DbSession;
-import org.sonar.db.component.ComponentDto;
-import org.sonar.db.component.ComponentTreeQuery;
-import org.sonar.db.component.ComponentTreeQuery.Strategy;
-import org.sonar.db.source.FileSourceDto;
+import org.sonar.db.component.FileMoveRowDto;
+import org.sonar.db.source.LineHashesWithKeyDto;
import org.sonar.server.computation.task.projectanalysis.analysis.AnalysisMetadataHolder;
import org.sonar.server.computation.task.projectanalysis.component.Component;
import org.sonar.server.computation.task.projectanalysis.component.CrawlerDepthLimit;
import org.sonar.server.computation.task.step.ComputationStep;
import static com.google.common.collect.FluentIterable.from;
-import static java.util.Arrays.asList;
import static org.sonar.server.computation.task.projectanalysis.component.ComponentVisitor.Order.POST_ORDER;
public class FileMoveDetectionStep implements ComputationStep {
protected static final int MIN_REQUIRED_SCORE = 85;
private static final Logger LOG = Loggers.get(FileMoveDetectionStep.class);
- private static final List<String> FILE_QUALIFIERS = asList(Qualifiers.FILE, Qualifiers.UNIT_TEST_FILE);
+ private static final Comparator<ScoreMatrix.ScoreFile> SCORE_FILE_COMPARATOR = (o1, o2) -> -1 * Integer.compare(o1.getLineCount(), o2.getLineCount());
+ private static final double LOWER_BOUND_RATIO = 0.84;
+ private static final double UPPER_BOUND_RATIO = 1.18;
private final AnalysisMetadataHolder analysisMetadataHolder;
private final TreeRootHolder rootHolder;
private Map<String, DbComponent> getDbFilesByKey() {
try (DbSession dbSession = dbClient.openSession(false)) {
- // FIXME no need to use such a complex query, joining on SNAPSHOTS and retrieving all column of table PROJECTS, replace with dedicated
- // mapper method
- List<ComponentDto> componentDtos = dbClient.componentDao().selectDescendants(
- dbSession,
- ComponentTreeQuery.builder()
- .setBaseUuid(rootHolder.getRoot().getUuid())
- .setQualifiers(FILE_QUALIFIERS)
- .setStrategy(Strategy.LEAVES)
- .build());
- return from(componentDtos)
- .transform(componentDto -> new DbComponent(componentDto.getId(), componentDto.getDbKey(), componentDto.uuid(), componentDto.path()))
- .uniqueIndex(DbComponent::getKey);
+ ImmutableList.Builder<DbComponent> builder = ImmutableList.builder();
+ dbClient.componentDao().scrollAllFilesForFileMove(dbSession, rootHolder.getRoot().getUuid(),
+ resultContext -> {
+ FileMoveRowDto row = resultContext.getResultObject();
+ builder.add(new DbComponent(row.getId(), row.getKey(), row.getUuid(), row.getPath(), row.getLineCount()));
+ });
+ return builder.build().stream()
+ .collect(MoreCollectors.uniqueIndex(DbComponent::getKey));
}
}
return builder.build();
}
- private ScoreMatrix computeScoreMatrix(Map<String, DbComponent> dtosByKey, Set<String> dbFileKeys, Map<String, File> reportFileSourcesByKey) {
- int[][] scoreMatrix = new int[dbFileKeys.size()][reportFileSourcesByKey.size()];
- int maxScore = 0;
+ private ScoreMatrix computeScoreMatrix(Map<String, DbComponent> dtosByKey, Set<String> removedFileKeys, Map<String, File> newFileSourcesByKey) {
+ ScoreMatrix.ScoreFile[] newFiles = newFileSourcesByKey.entrySet().stream()
+ .map(e -> new ScoreMatrix.ScoreFile(e.getKey(), e.getValue().getLineCount()))
+ .toArray(ScoreMatrix.ScoreFile[]::new);
+ ScoreMatrix.ScoreFile[] removedFiles = removedFileKeys.stream()
+ .map(key -> {
+ DbComponent dbComponent = dtosByKey.get(key);
+ return new ScoreMatrix.ScoreFile(dbComponent.getKey(), dbComponent.getLineCount());
+ })
+ .toArray(ScoreMatrix.ScoreFile[]::new);
+ // sort by highest line count first
+ Arrays.sort(newFiles, SCORE_FILE_COMPARATOR);
+ Arrays.sort(removedFiles, SCORE_FILE_COMPARATOR);
+ int[][] scoreMatrix = new int[removedFiles.length][newFiles.length];
+ int lastNewFileIndex = newFiles.length - 1;
+
+ Map<String, Integer> removedFilesIndexes = new HashMap<>(removedFileKeys.size());
+ for (int removeFileIndex = 0; removeFileIndex < removedFiles.length; removeFileIndex++) {
+ ScoreMatrix.ScoreFile removedFile = removedFiles[removeFileIndex];
+ int lowerBound = (int) Math.floor(removedFile.getLineCount() * LOWER_BOUND_RATIO);
+ int upperBound = (int) Math.ceil(removedFile.getLineCount() * UPPER_BOUND_RATIO);
+ // no need to compute score if all files are out of bound, so no need to load line hashes from DB
+ if (newFiles[0].getLineCount() <= lowerBound || newFiles[lastNewFileIndex].getLineCount() >= upperBound) {
+ continue;
+ }
+ removedFilesIndexes.put(removedFile.getFileKey(), removeFileIndex);
+ }
+ LineHashesWithKeyDtoResultHandler rowHandler = new LineHashesWithKeyDtoResultHandler(removedFilesIndexes, removedFiles,
+ newFiles, newFileSourcesByKey, scoreMatrix);
try (DbSession dbSession = dbClient.openSession(false)) {
- int dbFileIndex = 0;
- for (String removedFileKey : dbFileKeys) {
- File fileInDb = getFile(dbSession, dtosByKey.get(removedFileKey));
- if (fileInDb == null) {
+ dbClient.fileSourceDao().scrollLineHashes(dbSession, removedFilesIndexes.keySet(), rowHandler);
+ }
+
+ return new ScoreMatrix(removedFiles, newFiles, scoreMatrix, rowHandler.getMaxScore());
+ }
+
+ private final class LineHashesWithKeyDtoResultHandler implements ResultHandler<LineHashesWithKeyDto> {
+ private final Map<String, Integer> removedFilesIndexes;
+ private final ScoreMatrix.ScoreFile[] removedFiles;
+ private final ScoreMatrix.ScoreFile[] newFiles;
+ private final Map<String, File> newFileSourcesByKey;
+ private final int[][] scoreMatrix;
+ private int maxScore;
+
+ private LineHashesWithKeyDtoResultHandler(Map<String, Integer> removedFilesIndexes, ScoreMatrix.ScoreFile[] removedFiles,
+ ScoreMatrix.ScoreFile[] newFiles, Map<String, File> newFileSourcesByKey,
+ int[][] scoreMatrix) {
+ this.removedFilesIndexes = removedFilesIndexes;
+ this.removedFiles = removedFiles;
+ this.newFiles = newFiles;
+ this.newFileSourcesByKey = newFileSourcesByKey;
+ this.scoreMatrix = scoreMatrix;
+ }
+
+ @Override
+ public void handleResult(ResultContext<? extends LineHashesWithKeyDto> resultContext) {
+ LineHashesWithKeyDto lineHashesDto = resultContext.getResultObject();
+ if (lineHashesDto.getPath() == null) {
+ return;
+ }
+ int removeFileIndex = removedFilesIndexes.get(lineHashesDto.getKey());
+ ScoreMatrix.ScoreFile removedFile = removedFiles[removeFileIndex];
+ int lowerBound = (int) Math.floor(removedFile.getLineCount() * LOWER_BOUND_RATIO);
+ int upperBound = (int) Math.ceil(removedFile.getLineCount() * UPPER_BOUND_RATIO);
+
+ for (int newFileIndex = 0; newFileIndex < newFiles.length; newFileIndex++) {
+ ScoreMatrix.ScoreFile newFile = newFiles[newFileIndex];
+ if (newFile.getLineCount() >= upperBound) {
continue;
}
+ if (newFile.getLineCount() <= lowerBound) {
+ break;
+ }
- int reportFileIndex = 0;
- for (Map.Entry<String, File> reportFileSourceAndKey : reportFileSourcesByKey.entrySet()) {
- File unmatchedFile = reportFileSourceAndKey.getValue();
- int score = fileSimilarity.score(fileInDb, unmatchedFile);
- scoreMatrix[dbFileIndex][reportFileIndex] = score;
- if (score > maxScore) {
- maxScore = score;
- }
- reportFileIndex++;
+ File fileInDb = new File(lineHashesDto.getPath(), lineHashesDto.getLineHashes());
+ File unmatchedFile = newFileSourcesByKey.get(newFile.getFileKey());
+ int score = fileSimilarity.score(fileInDb, unmatchedFile);
+ scoreMatrix[removeFileIndex][newFileIndex] = score;
+ if (score > maxScore) {
+ maxScore = score;
}
- dbFileIndex++;
}
}
- return new ScoreMatrix(dbFileKeys, reportFileSourcesByKey, scoreMatrix, maxScore);
- }
-
- @CheckForNull
- private File getFile(DbSession dbSession, DbComponent dbComponent) {
- if (dbComponent.getPath() == null) {
- return null;
- }
- FileSourceDto fileSourceDto = dbClient.fileSourceDao().selectSourceByFileUuid(dbSession, dbComponent.getUuid());
- if (fileSourceDto == null) {
- return null;
+ int getMaxScore() {
+ return maxScore;
}
- return new File(dbComponent.getPath(), fileSourceDto.getLineHashes());
}
private static ElectedMatches electMatches(Set<String> dbFileKeys, Map<String, File> reportFileSourcesByKey, MatchesByScore matchesByScore) {
private final String key;
private final String uuid;
private final String path;
+ private final int lineCount;
- private DbComponent(long id, String key, String uuid, String path) {
+ private DbComponent(long id, String key, String uuid, String path, int lineCount) {
this.id = id;
this.key = key;
this.uuid = uuid;
this.path = path;
+ this.lineCount = lineCount;
}
public long getId() {
public String getPath() {
return path;
}
+
+ public int getLineCount() {
+ return lineCount;
+ }
}
private static class ElectedMatches implements Iterable<Match> {
final class File {
private final String path;
private final List<String> lineHashes;
+ private final int lineCount;
public File(String path, List<String> lineHashes) {
this.path = requireNonNull(path, "path can not be null");
this.lineHashes = requireNonNull(lineHashes, "lineHashes can not be null");
+ this.lineCount = lineHashes.size();
}
public String getPath() {
public List<String> getLineHashes() {
return lineHashes;
}
+
+ public int getLineCount() {
+ return lineCount;
+ }
}
int score(File file1, File file2);
}
@Override
- public void visit(String dbFileKey, String reportFileKey, int score) {
+ public void visit(ScoreMatrix.ScoreFile removedFile, ScoreMatrix.ScoreFile newFile, int score) {
if (!isAcceptableScore(score)) {
return;
}
if (matches[index] == null) {
matches[index] = new ArrayList<>(1);
}
- Match match = new Match(dbFileKey, reportFileKey);
+ Match match = new Match(removedFile.getFileKey(), newFile.getFileKey());
matches[index].add(match);
totalMatches++;
}
*/
package org.sonar.server.computation.task.projectanalysis.filemove;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
+import java.util.Arrays;
final class ScoreMatrix {
- private final Set<String> dbFileKeys;
- private final Map<String, FileSimilarity.File> reportFileSourcesByKey;
+ private final ScoreFile[] removedFiles;
+ private final ScoreFile[] newFiles;
private final int[][] scores;
private final int maxScore;
- public ScoreMatrix(Set<String> dbFileKeys, Map<String, FileSimilarity.File> reportFileSourcesByKey, int[][] scores, int maxScore) {
- this.dbFileKeys = dbFileKeys;
- this.reportFileSourcesByKey = reportFileSourcesByKey;
+ public ScoreMatrix(ScoreFile[] removedFiles, ScoreFile[] newFiles, int[][] scores, int maxScore) {
+ this.removedFiles = removedFiles;
+ this.newFiles = newFiles;
this.scores = scores;
this.maxScore = maxScore;
}
public void accept(ScoreMatrixVisitor visitor) {
- int dbFileIndex = 0;
- for (String dbFileKey : dbFileKeys) {
- int reportFileIndex = 0;
- for (Map.Entry<String, FileSimilarity.File> reportFileSourceAndKey : reportFileSourcesByKey.entrySet()) {
- int score = scores[dbFileIndex][reportFileIndex];
- visitor.visit(dbFileKey, reportFileSourceAndKey.getKey(), score);
- reportFileIndex++;
+ for (int removedFileIndex = 0; removedFileIndex < removedFiles.length; removedFileIndex++) {
+ for (int newFileIndex = 0; newFileIndex < newFiles.length; newFileIndex++) {
+ int score = scores[removedFileIndex][newFileIndex];
+ visitor.visit(removedFiles[removedFileIndex], newFiles[newFileIndex], score);
}
- dbFileIndex++;
}
}
public String toCsv(char separator) {
StringBuilder res = new StringBuilder();
// first row: empty column, then one column for each report file (its key)
- res.append(separator);
- for (Map.Entry<String, FileSimilarity.File> reportEntry : reportFileSourcesByKey.entrySet()) {
- res.append(reportEntry.getKey()).append(separator);
- }
+ res.append("newFiles=>").append(separator);
+ Arrays.stream(newFiles).forEach(f -> res.append(f.getFileKey()).append('(').append(f.getLineCount()).append(')').append(separator));
// rows with data: column with db file (its key), then one column for each value
accept(new ScoreMatrixVisitor() {
- private String previousDbFileKey = null;
+ private ScoreFile previousRemovedFile = null;
@Override
- public void visit(String dbFileKey, String reportFileKey, int score) {
- if (!Objects.equals(previousDbFileKey, dbFileKey)) {
- res.append('\n').append(dbFileKey).append(separator);
- previousDbFileKey = dbFileKey;
+ public void visit(ScoreFile removedFile, ScoreFile newFile, int score) {
+ if (previousRemovedFile != removedFile) {
+ res.append('\n').append(removedFile.getFileKey()).append('(').append(removedFile.getLineCount()).append(')').append(separator);
+ previousRemovedFile = removedFile;
}
res.append(score).append(separator);
}
@FunctionalInterface
public interface ScoreMatrixVisitor {
- void visit(String dbFileKey, String reportFileKey, int score);
+ void visit(ScoreFile removedFile, ScoreFile newFile, int score);
}
public int getMaxScore() {
return maxScore;
}
+
+ /**
+ * Lightweight (file key, line count) pair labelling one axis entry of the score matrix.
+ */
+ static class ScoreFile {
+ private final String fileKey;
+ private final int lineCount;
+
+ ScoreFile(String fileKey, int lineCount) {
+ this.fileKey = fileKey;
+ this.lineCount = lineCount;
+ }
+
+ public String getFileKey() {
+ return fileKey;
+ }
+
+ public int getLineCount() {
+ return lineCount;
+ }
+
+ }
}
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
+import java.util.stream.IntStream;
import javax.annotation.CheckForNull;
import javax.annotation.Nullable;
import org.apache.commons.io.FileUtils;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.sonar.server.computation.task.projectanalysis.component.ReportComponent.builder;
+import static org.sonar.server.computation.task.projectanalysis.filemove.FileMoveDetectionStep.MIN_REQUIRED_SCORE;
public class FileMoveDetectionStepTest {
"package org.sonar.server.computation.task.projectanalysis.filemove;",
"",
"public class Foo {",
+ " public String foo() {",
+ " return \"Donut!\";",
+ " }",
"}"
};
private static final String[] CONTENT_EMPTY = {
private SourceLinesHashRepository sourceLinesHash = mock(SourceLinesHashRepository.class);
private FileSimilarity fileSimilarity = new FileSimilarityImpl(new SourceSimilarityImpl());
- private ScoreMatrixDumper scoreMatrixDumper = mock(ScoreMatrixDumper.class);
+ private CapturingScoreMatrixDumper scoreMatrixDumper = new CapturingScoreMatrixDumper();
private FileMoveDetectionStep underTest = new FileMoveDetectionStep(analysisMetadataHolder, treeRootHolder, dbClient,
fileSimilarity, movedFilesRepository, sourceLinesHash, scoreMatrixDumper);
underTest.execute();
assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
+ assertThat(scoreMatrixDumper.scoreMatrix.getMaxScore())
+ .isGreaterThan(0)
+ .isLessThan(MIN_REQUIRED_SCORE);
}
@Test
underTest.execute();
assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
+ assertThat(scoreMatrixDumper.scoreMatrix.getMaxScore()).isZero();
}
@Test
underTest.execute();
assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
+ assertThat(scoreMatrixDumper.scoreMatrix).isNull();
}
@Test
underTest.execute();
assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
+ assertThat(scoreMatrixDumper.scoreMatrix.getMaxScore()).isZero();
}
@Test
underTest.execute();
assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
+ assertThat(scoreMatrixDumper.scoreMatrix.getMaxScore()).isEqualTo(100);
}
@Test
underTest.execute();
assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
+ assertThat(scoreMatrixDumper.scoreMatrix.getMaxScore()).isEqualTo(100);
}
@Test
underTest.execute();
assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
+ assertThat(scoreMatrixDumper.scoreMatrix).isNull();
}
@Test
assertThat(originalFile5.getId()).isEqualTo(dtos[3].getId());
assertThat(originalFile5.getKey()).isEqualTo(dtos[3].getDbKey());
assertThat(originalFile5.getUuid()).isEqualTo(dtos[3].uuid());
+ assertThat(scoreMatrixDumper.scoreMatrix.getMaxScore()).isGreaterThan(MIN_REQUIRED_SCORE);
+ }
+
+ @Test
+ public void execute_does_not_compute_any_distance_if_all_files_sizes_are_all_too_different() {
+ analysisMetadataHolder.setBaseAnalysis(ANALYSIS);
+ Component file4 = fileComponent(5);
+ insertFiles(FILE_1.getKey(), FILE_2.getKey());
+ // line counts are chosen so every (removed, new) pair falls outside the
+ // lower/upper line-count bounds: 100 vs 118/25 and 30 vs 118/25
+ insertContentOfFileInDb(FILE_1.getKey(), arrayOf(100));
+ insertContentOfFileInDb(FILE_2.getKey(), arrayOf(30));
+ setFilesInReport(FILE_3, file4);
+ setFileLineHashesInReport(FILE_3, arrayOf(118));
+ setFileLineHashesInReport(file4, arrayOf(25));
+
+ underTest.execute();
+
+ // no pair was close enough in size, so no similarity score was ever computed
+ assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
+ assertThat(scoreMatrixDumper.scoreMatrix.getMaxScore()).isZero();
+ }
+
+ /**
+ * Creates an array of {@code numberOfElements} int values as String, starting with zero.
+ */
+ private static String[] arrayOf(int numberOfElements) {
+ return IntStream.range(0, numberOfElements).mapToObj(String::valueOf).toArray(String[]::new);
}
/**
.build();
}
+ /** Test double that captures the dumped matrix instead of writing a CSV, so tests can assert on it. */
+ private static class CapturingScoreMatrixDumper implements ScoreMatrixDumper {
+ private ScoreMatrix scoreMatrix;
+
+ @Override
+ public void dumpAsCsv(ScoreMatrix scoreMatrix) {
+ this.scoreMatrix = scoreMatrix;
+ }
+ }
}
package org.sonar.server.computation.task.projectanalysis.filemove;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import java.util.Arrays;
-import java.util.Collections;
import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.Random;
import org.junit.Test;
+import org.sonar.server.computation.task.projectanalysis.filemove.ScoreMatrix.ScoreFile;
-import static com.google.common.collect.ImmutableSet.of;
-import static java.util.Collections.emptyList;
-import static java.util.Collections.emptySet;
import static org.assertj.core.api.Assertions.assertThat;
import static org.sonar.server.computation.task.projectanalysis.filemove.FileMoveDetectionStep.MIN_REQUIRED_SCORE;
@Test
public void creates_returns_always_the_same_instance_of_maxScore_is_less_than_min_required_score() {
- Set<String> doesNotMatterDbFileKeys = emptySet();
- Map<String, FileSimilarity.File> doesNotMatterReportFiles = Collections.emptyMap();
+ ScoreFile[] doesNotMatterRemovedFiles = new ScoreFile[0];
+ ScoreFile[] doesNotMatterNewFiles = new ScoreFile[0];
int[][] doesNotMatterScores = new int[0][0];
- ScoreMatrix scoreMatrix1 = new ScoreMatrix(doesNotMatterDbFileKeys, doesNotMatterReportFiles, doesNotMatterScores, MIN_REQUIRED_SCORE - 1);
+ ScoreMatrix scoreMatrix1 = new ScoreMatrix(doesNotMatterRemovedFiles, doesNotMatterNewFiles, doesNotMatterScores, MIN_REQUIRED_SCORE - 1);
MatchesByScore matchesByScore = MatchesByScore.create(scoreMatrix1);
assertThat(matchesByScore.getSize()).isEqualTo(0);
assertThat(matchesByScore).isEmpty();
- ScoreMatrix scoreMatrix2 = new ScoreMatrix(doesNotMatterDbFileKeys, doesNotMatterReportFiles, doesNotMatterScores, MIN_REQUIRED_SCORE - 5);
+ ScoreMatrix scoreMatrix2 = new ScoreMatrix(doesNotMatterRemovedFiles, doesNotMatterNewFiles, doesNotMatterScores, MIN_REQUIRED_SCORE - 5);
assertThat(MatchesByScore.create(scoreMatrix2)).isSameAs(matchesByScore);
}
{8},
{85},
};
- MatchesByScore matchesByScore = MatchesByScore.create(new ScoreMatrix(
- of("A", "B", "C"), ImmutableMap.of("1", fileOf("1")), scores, maxScore));
+ MatchesByScore matchesByScore = MatchesByScore.create(new ScoreMatrix(of("A", "B", "C"), of("1"), scores, maxScore));
assertThat(matchesByScore.getSize()).isEqualTo(2);
assertThat(Lists.newArrayList(matchesByScore)).isEqualTo(Arrays.asList(
));
}
- private static FileSimilarity.File fileOf(String key) {
- return new FileSimilarity.File("path of " + key, emptyList());
+ // builds ScoreFiles for the given keys; line counts are random because they
+ // do not matter to these assertions
+ private ScoreFile[] of(String... fileKeys) {
+ return Arrays.stream(fileKeys)
+ .map(key -> new ScoreFile(key, new Random().nextInt(40)))
+ .toArray(ScoreFile[]::new);
}
+
}
*/
package org.sonar.server.computation.task.projectanalysis.filemove;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
import com.tngtech.java.junit.dataprovider.DataProvider;
import com.tngtech.java.junit.dataprovider.DataProviderRunner;
import com.tngtech.java.junit.dataprovider.UseDataProvider;
import org.sonar.api.config.Configuration;
import org.sonar.api.config.internal.MapSettings;
import org.sonar.ce.queue.CeTask;
+import org.sonar.server.computation.task.projectanalysis.filemove.ScoreMatrix.ScoreFile;
import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(DataProviderRunner.class)
public class ScoreMatrixDumperImplTest {
private static final ScoreMatrix A_SCORE_MATRIX = new ScoreMatrix(
- ImmutableSet.of("A", "B"),
- ImmutableMap.of("1", new FileSimilarity.File("path_1", ImmutableList.of("foo", "bar"))),
+ new ScoreFile[] {new ScoreFile("A", 12), new ScoreFile("B", 8)},
+ new ScoreFile[] {new ScoreFile("1", 7)},
new int[][] {{10}, {2}},
10);
private MapSettings settings = new MapSettings();
public void finding_threshold_in_line_count_to_go_below_85_score() {
assertThat(underTest.score(listOf(100), listOf(115))).isEqualTo(86);
assertThat(underTest.score(listOf(100), listOf(116))).isEqualTo(86);
- assertThat(underTest.score(listOf(100), listOf(117))).isEqualTo(85);
- assertThat(underTest.score(listOf(100), listOf(118))).isEqualTo(84); // 84.74%
+ assertThat(underTest.score(listOf(100), listOf(117))).isEqualTo(85); // 85.47% - 117%
+ assertThat(underTest.score(listOf(100), listOf(118))).isEqualTo(84); // 84.74% - 118%
- assertThat(underTest.score(listOf(50), listOf(58))).isEqualTo(86);
- assertThat(underTest.score(listOf(50), listOf(59))).isEqualTo(84); // 84.74%
+ assertThat(underTest.score(listOf(50), listOf(58))).isEqualTo(86); // 86.20% - 116%
+ assertThat(underTest.score(listOf(50), listOf(59))).isEqualTo(84); // 84.74% - 118%
- assertThat(underTest.score(listOf(25), listOf(29))).isEqualTo(86);
- assertThat(underTest.score(listOf(25), listOf(30))).isEqualTo(83); // 83.33%
+ assertThat(underTest.score(listOf(25), listOf(29))).isEqualTo(86); // 86.20% - 116%
+ assertThat(underTest.score(listOf(25), listOf(30))).isEqualTo(83); // 83.33% - 120%
- assertThat(underTest.score(listOf(12), listOf(14))).isEqualTo(85);
- assertThat(underTest.score(listOf(12), listOf(15))).isEqualTo(80); // 80.00%
+ assertThat(underTest.score(listOf(12), listOf(14))).isEqualTo(85); // 85.71% - 116.67%
+ assertThat(underTest.score(listOf(12), listOf(15))).isEqualTo(80); // 80.00% - 125%
- assertThat(underTest.score(listOf(10), listOf(11))).isEqualTo(90);
- assertThat(underTest.score(listOf(10), listOf(12))).isEqualTo(83); // 83.33%
+ assertThat(underTest.score(listOf(10), listOf(11))).isEqualTo(90); // 90.90% - 110%
+ assertThat(underTest.score(listOf(10), listOf(12))).isEqualTo(83); // 83.33% - 120%
- assertThat(underTest.score(listOf(5), listOf(5))).isEqualTo(100);
- assertThat(underTest.score(listOf(5), listOf(6))).isEqualTo(83); // 83.33%
+ assertThat(underTest.score(listOf(5), listOf(5))).isEqualTo(100); // 100% - 100%
+ assertThat(underTest.score(listOf(5), listOf(6))).isEqualTo(83); // 83.33% - 120%
- assertThat(underTest.score(listOf(200), listOf(234))).isEqualTo(85);
- assertThat(underTest.score(listOf(200), listOf(236))).isEqualTo(84); // 84.75%
+ assertThat(underTest.score(listOf(200), listOf(234))).isEqualTo(85); // 85.47% - 117%
+ assertThat(underTest.score(listOf(200), listOf(236))).isEqualTo(84); // 84.75% - 118%
- assertThat(underTest.score(listOf(300), listOf(352))).isEqualTo(85);
- assertThat(underTest.score(listOf(300), listOf(354))).isEqualTo(84); // 84.74%
+ assertThat(underTest.score(listOf(300), listOf(352))).isEqualTo(85); // 85.23% - 117.33%
+ assertThat(underTest.score(listOf(300), listOf(354))).isEqualTo(84); // 84.74% - 118%
- assertThat(underTest.score(listOf(400), listOf(470))).isEqualTo(85);
- assertThat(underTest.score(listOf(400), listOf(471))).isEqualTo(84); // 84.92%
+ assertThat(underTest.score(listOf(400), listOf(470))).isEqualTo(85); // 85.10% - 117.50%
+ assertThat(underTest.score(listOf(400), listOf(471))).isEqualTo(84); // 84.92% - 117.75%
- assertThat(underTest.score(listOf(500), listOf(588))).isEqualTo(85);
- assertThat(underTest.score(listOf(500), listOf(589))).isEqualTo(84); // 84.88%
+ assertThat(underTest.score(listOf(500), listOf(588))).isEqualTo(85); // 85.03% - 117.60%
+ assertThat(underTest.score(listOf(500), listOf(589))).isEqualTo(84); // 84.88% - 117.80%
+ }
+
+ @Test
+ public void verify_84_percent_ratio_for_lower_bound() {
+ IntStream.range(0, 1000)
+ .forEach(ref -> lowerBoundGivesNonMeaningfulScore(ref, 0.84));
+ }
+
+ @Test
+ public void verify_118_percent_ratio_for_upper_bound() {
+ IntStream.range(0, 1000)
+ .forEach(ref -> upperBoundGivesNonMeaningfulScore(ref, 1.18));
+ }
+
+ private void lowerBoundGivesNonMeaningfulScore(Integer ref, double ratio) {
+ int lowerBound = (int) Math.floor(ref * ratio);
+ assertThat(underTest.score(listOf(ref), listOf(lowerBound)))
+ .describedAs("Score for %s%% lines of %s (ie. %s lines) should be 84 or less", ratio * 100, ref, lowerBound)
+ .isLessThanOrEqualTo(84);
+ }
+
+ private void upperBoundGivesNonMeaningfulScore(Integer ref, double ratio) {
+ int upperBound = (int) Math.ceil(ref * ratio);
+ assertThat(underTest.score(listOf(ref), listOf(upperBound)))
+ .describedAs("Score for %s%% lines of %s (ie. %s lines) should be 84 or less", ratio * 100, ref, upperBound)
+ .isLessThanOrEqualTo(84);
}
/**