import org.sonar.api.database.model.SnapshotSource;
import org.sonar.api.design.DependencyDto;
import org.sonar.api.utils.TimeProfiler;
-import org.sonar.jpa.entity.DuplicationBlock;
/**
* @since 2.5
deleteSnapshots(session, snapshotIds);
}
- public static void deleteDependencies(DatabaseSession session, List<Integer> snapshotIds) {
+ public static void deleteDependencies(DatabaseSession session, List<Integer> snapshotIds) {
executeQuery(session, "delete dependencies", snapshotIds, "delete from " + DependencyDto.class.getSimpleName() + " d where d.fromSnapshotId in (:ids)");
executeQuery(session, "delete dependencies", snapshotIds, "delete from " + DependencyDto.class.getSimpleName() + " d where d.toSnapshotId in (:ids)");
}
/**
* Delete all measures, including MEASURE_DATA
*/
- public static void deleteMeasuresBySnapshotId(DatabaseSession session, List<Integer> snapshotIds) {
+ public static void deleteMeasuresBySnapshotId(DatabaseSession session, List<Integer> snapshotIds) {
executeQuery(session, "delete measures by snapshot id", snapshotIds, "delete from " + MeasureData.class.getSimpleName() + " m where m.snapshotId in (:ids)");
executeQuery(session, "delete measures by snapshot id", snapshotIds, "delete from " + MeasureModel.class.getSimpleName() + " m where m.snapshotId in (:ids)");
}
/**
* Delete all measures, including MEASURE_DATA
*/
- public static void deleteMeasuresById(DatabaseSession session, List<Integer> measureIds) {
+ public static void deleteMeasuresById(DatabaseSession session, List<Integer> measureIds) {
executeQuery(session, "delete measures by id", measureIds, "delete from " + MeasureData.class.getSimpleName() + " m where m.measure.id in (:ids)");
executeQuery(session, "delete measures by id", measureIds, "delete from " + MeasureModel.class.getSimpleName() + " m where m.id in (:ids)");
}
/**
* Delete SNAPSHOT_SOURCES table
*/
- public static void deleteSources(DatabaseSession session, List<Integer> snapshotIds) {
+ public static void deleteSources(DatabaseSession session, List<Integer> snapshotIds) {
executeQuery(session, "delete sources", snapshotIds, "delete from " + SnapshotSource.class.getSimpleName() + " e where e.snapshotId in (:ids)");
}
/**
* Delete violations (RULE_FAILURES table)
*/
- public static void deleteViolations(DatabaseSession session, List<Integer> snapshotIds) {
+ public static void deleteViolations(DatabaseSession session, List<Integer> snapshotIds) {
executeQuery(session, "delete violations", snapshotIds, "delete from " + RuleFailureModel.class.getSimpleName() + " e where e.snapshotId in (:ids)");
}
/**
+ * Delete DUPLICATIONS_INDEX table
+ *
* @since 2.11
*/
private static void deleteDuplicationBlocks(DatabaseSession session, List<Integer> snapshotIds) {
- executeQuery(session, "delete duplication blocks", snapshotIds, "delete from " + DuplicationBlock.class.getSimpleName() + " e where e.snapshotId in (:ids)");
+ executeNativeQuery(session, "delete duplication blocks", snapshotIds, "delete from duplications_index e where e.snapshot_id in (:ids)");
}
/**
* Delete EVENTS table
*/
- public static void deleteEvents(DatabaseSession session, List<Integer> snapshotIds) {
+ public static void deleteEvents(DatabaseSession session, List<Integer> snapshotIds) {
executeQuery(session, "delete events", snapshotIds, "delete from " + Event.class.getSimpleName() + " e where e.snapshot.id in (:ids)");
}
/**
* Delete SNAPSHOTS table
*/
- public static void deleteSnapshots(DatabaseSession session, List<Integer> snapshotIds) {
+ public static void deleteSnapshots(DatabaseSession session, List<Integer> snapshotIds) {
executeQuery(session, "delete snapshots", snapshotIds, "delete from " + Snapshot.class.getSimpleName() + " s where s.id in (:ids)");
}
/**
* Paginate execution of SQL requests to avoid exceeding size of rollback segment
*/
- public static void executeQuery(DatabaseSession session, String description, List<Integer> ids, String hql) {
+ public static void executeQuery(DatabaseSession session, String description, List<Integer> ids, String hql) {
if (ids == null || ids.isEmpty()) {
return;
}
+ executeQuery(session, description, ids, session.createQuery(hql));
+ }
- TimeProfiler profiler = new TimeProfiler().setLevelToDebug().start("Execute " + description);
+ /**
+ * @since 2.13
+ */
+ private static void executeNativeQuery(DatabaseSession session, String description, List<Integer> ids, String sql) {
+ if (ids == null || ids.isEmpty()) {
+ return;
+ }
+ executeQuery(session, description, ids, session.createNativeQuery(sql));
+ }
+ /**
+ * @since 2.13
+ */
+ private static void executeQuery(DatabaseSession session, String description, List<Integer> ids, Query query) {
+ TimeProfiler profiler = new TimeProfiler().setLevelToDebug().start("Execute " + description);
int index = 0;
while (index < ids.size()) {
- Query query = session.createQuery(hql);
List<Integer> paginedSids = ids.subList(index, Math.min(ids.size(), index + MAX_IN_ELEMENTS));
query.setParameter("ids", paginedSids);
query.executeUpdate();
index += MAX_IN_ELEMENTS;
session.commit();
}
-
profiler.stop();
}
<!--parent_dependency_id="[null]" project_snapshot_id="1"-->
<!--dep_usage="INHERITS" dep_weight="1" from_scope="FIL" to_scope="FIL"/>-->
- <!--<duplications_index id="1" project_snapshot_id="1" snapshot_id="3" hash="bb" index_in_file="0" start_line="0" end_line="0" />-->
- <!--<duplications_index id="2" project_snapshot_id="1" snapshot_id="4" hash="bb" index_in_file="0" start_line="0" end_line="0" />-->
+ <!--<duplications_index project_snapshot_id="1" snapshot_id="3" hash="bb" index_in_file="0" start_line="0" end_line="0" />-->
+ <!--<duplications_index project_snapshot_id="1" snapshot_id="4" hash="bb" index_in_file="0" start_line="0" end_line="0" />-->
<events id="1" name="Version 1.0" resource_id="1" snapshot_id="1" category="VERSION" description="[null]" event_date="2008-12-02 13:58:00.00" CREATED_AT="[null]"/>
<!--events id="2" name="Version 2.0" resource_id="3" snapshot_id="3" category="VERSION" description="[null]" event_date="2008-12-02 13:58:00.00" CREATED_AT="[null]"/-->
parent_dependency_id="[null]" project_snapshot_id="1"
dep_usage="INHERITS" dep_weight="1" from_scope="FIL" to_scope="FIL"/>
- <duplications_index id="1" project_snapshot_id="1" snapshot_id="3" hash="bb" index_in_file="0" start_line="0" end_line="0"/>
- <duplications_index id="2" project_snapshot_id="1" snapshot_id="4" hash="bb" index_in_file="0" start_line="0" end_line="0"/>
+ <duplications_index project_snapshot_id="1" snapshot_id="3" hash="bb" index_in_file="0" start_line="0" end_line="0"/>
+ <duplications_index project_snapshot_id="1" snapshot_id="4" hash="bb" index_in_file="0" start_line="0" end_line="0"/>
<events id="1" name="Version 1.0" resource_id="1" snapshot_id="1" category="VERSION" description="[null]" event_date="2008-12-02 13:58:00.00" CREATED_AT="[null]"/>
<events id="2" name="Version 2.0" resource_id="3" snapshot_id="3" category="VERSION" description="[null]" event_date="2008-12-02 13:58:00.00" CREATED_AT="[null]"/>
+++ /dev/null
-/*
- * Sonar, open source software quality management tool.
- * Copyright (C) 2008-2011 SonarSource
- * mailto:contact AT sonarsource DOT com
- *
- * Sonar is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 3 of the License, or (at your option) any later version.
- *
- * Sonar is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with Sonar; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02
- */
-package org.sonar.jpa.entity;
-
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.Table;
-
-/**
- * @since 2.11
- */
-@Entity
-@Table(name = "duplications_index")
-public class DuplicationBlock {
-
- public static final int BLOCK_HASH_SIZE = 50;
-
- @Id
- @Column(name = "id")
- @GeneratedValue
- private Integer id;
-
- @Column(name = "snapshot_id", updatable = false, nullable = false)
- private Integer snapshotId;
-
- @Column(name = "project_snapshot_id", updatable = false, nullable = false)
- private Integer projectSnapshotId;
-
- @Column(name = "hash", updatable = false, nullable = false, length = BLOCK_HASH_SIZE)
- private String hash;
-
- @Column(name = "index_in_file", updatable = false, nullable = false)
- private Integer indexInFile;
-
- @Column(name = "start_line", updatable = false, nullable = false)
- private Integer startLine;
-
- @Column(name = "end_line", updatable = false, nullable = false)
- private Integer endLine;
-
- public DuplicationBlock() {
- }
-
- public DuplicationBlock(Integer projectSnapshotId, Integer snapshotId, String hash, Integer indexInFile, Integer startLine, Integer endLine) {
- this.projectSnapshotId = projectSnapshotId;
- this.snapshotId = snapshotId;
- this.hash = hash;
- this.indexInFile = indexInFile;
- this.startLine = startLine;
- this.endLine = endLine;
- }
-
- public Integer getId() {
- return id;
- }
-
- public Integer getSnapshotId() {
- return snapshotId;
- }
-
- public Integer getProjectSnapshotId() {
- return projectSnapshotId;
- }
-
- public String getHash() {
- return hash;
- }
-
- public Integer getIndexInFile() {
- return indexInFile;
- }
-
- public Integer getStartLine() {
- return startLine;
- }
-
- public Integer getEndLine() {
- return endLine;
- }
-
-}
- complete the Derby DDL file used for unit tests : sonar-testing-harness/src/main/resources/org/sonar/test/persistence/sonar-test.ddl
*/
- public static final int LAST_VERSION = 238;
+ public static final int LAST_VERSION = 239;
public final static String TABLE_NAME = "schema_migrations";
*/
public final class DuplicationUnitDto {
- private Long id;
-
private Integer snapshotId;
private Integer projectSnapshotId;
this.endLine = endLine;
}
- public Long getId() {
- return id;
- }
-
- public void setId(Long id) {
- this.id = id;
- }
-
public Integer getSnapshotId() {
return snapshotId;
}
<class>org.sonar.api.rules.ActiveRuleParamChange</class>
<class>org.sonar.jpa.entity.Review</class>
<class>org.sonar.jpa.entity.NotificationQueueElement</class>
- <class>org.sonar.jpa.entity.DuplicationBlock</class>
<properties>
<property name="hibernate.current_session_context_class" value="thread"/>
</if>
</select>
- <insert id="batchInsert" parameterType="DuplicationUnit" keyColumn="id" useGeneratedKeys="false">
+ <insert id="batchInsert" parameterType="DuplicationUnit" useGeneratedKeys="false">
INSERT INTO duplications_index (snapshot_id, project_snapshot_id, hash, index_in_file, start_line, end_line)
VALUES (#{snapshotId}, #{projectSnapshotId}, #{hash}, #{indexInFile}, #{startLine}, #{endLine})
</insert>
</if>
</select>
- <insert id="batchInsert" parameterType="DuplicationUnit" keyColumn="id" useGeneratedKeys="false">
- INSERT INTO duplications_index (id, snapshot_id, project_snapshot_id, hash, index_in_file, start_line, end_line)
- VALUES (duplications_index_seq.NEXTVAL, #{snapshotId}, #{projectSnapshotId}, #{hash}, #{indexInFile}, #{startLine}, #{endLine})
+ <insert id="batchInsert" parameterType="DuplicationUnit" useGeneratedKeys="false">
+ INSERT INTO duplications_index (snapshot_id, project_snapshot_id, hash, index_in_file, start_line, end_line)
+ VALUES (#{snapshotId}, #{projectSnapshotId}, #{hash}, #{indexInFile}, #{startLine}, #{endLine})
</insert>
</mapper>
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('236');
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('237');
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('238');
+INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('239');
INSERT INTO USERS(ID, LOGIN, NAME, EMAIL, CRYPTED_PASSWORD, SALT, CREATED_AT, UPDATED_AT, REMEMBER_TOKEN, REMEMBER_TOKEN_EXPIRES_AT) VALUES (1, 'admin', 'Administrator', '', 'a373a0e667abb2604c1fd571eb4ad47fe8cc0878', '48bc4b0d93179b5103fd3885ea9119498e9d161b', '2011-09-26 22:27:48.0', '2011-09-26 22:27:48.0', null, null);
ALTER TABLE USERS ALTER COLUMN ID RESTART WITH 2;
);
CREATE TABLE "DUPLICATIONS_INDEX" (
- "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
"PROJECT_SNAPSHOT_ID" INTEGER NOT NULL,
"SNAPSHOT_ID" INTEGER NOT NULL,
"HASH" VARCHAR(50) NOT NULL,
ALTER TABLE "USER_ROLES" ADD CONSTRAINT "SQL110927104437940" PRIMARY KEY ("ID");
-ALTER TABLE "DUPLICATIONS_INDEX" ADD CONSTRAINT "SQL110927104441080" PRIMARY KEY ("ID");
-
ALTER TABLE "SNAPSHOT_SOURCES" ADD CONSTRAINT "SQL110927104437590" PRIMARY KEY ("ID");
ALTER TABLE "NOTIFICATIONS" ADD CONSTRAINT "SQL110927104441030" PRIMARY KEY ("ID");
package org.sonar.persistence.duplication;
import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
-import org.sonar.jpa.test.AbstractDbUnitTestCase;
import org.sonar.persistence.DaoTestCase;
import org.sonar.persistence.duplication.DuplicationDao;
import org.sonar.persistence.duplication.DuplicationUnitDto;
-import com.google.common.collect.Lists;
-
public class DuplicationDaoTest extends DaoTestCase {
private DuplicationDao dao;
checkTables("shouldInsert", "duplications_index");
}
- @Test
- public void testBatchInsert() {
- List<DuplicationUnitDto> duplications = Lists.newArrayList();
- for (int i = 0; i < 50; i++) {
- duplications.add(new DuplicationUnitDto(i, i, "hash", 2, 30, 40));
- }
- dao.insert(duplications);
-
- for (DuplicationUnitDto duplication : duplications) {
- // batch insert : faster but generated ids are not returned
- assertThat(duplication.getId(), nullValue());
- }
- }
-
}
<!-- Old snapshot of another project -->
<!-- bar-old -->
- <duplications_index id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="0" end_line="0" />
+ <duplications_index project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="0" end_line="0" />
<!-- Last snapshot of another project -->
<!-- bar-last -->
- <duplications_index id="2" project_snapshot_id="3" snapshot_id="4" hash="aa" index_in_file="0" start_line="1" end_line="2" />
+ <duplications_index project_snapshot_id="3" snapshot_id="4" hash="aa" index_in_file="0" start_line="1" end_line="2" />
<!-- Old snapshot of current project -->
<!-- foo-old -->
- <duplications_index id="3" project_snapshot_id="5" snapshot_id="6" hash="bb" index_in_file="0" start_line="0" end_line="0" />
+ <duplications_index project_snapshot_id="5" snapshot_id="6" hash="bb" index_in_file="0" start_line="0" end_line="0" />
<!-- Last snapshot of current project -->
<!-- foo-last -->
- <duplications_index id="4" project_snapshot_id="7" snapshot_id="8" hash="aa" index_in_file="0" start_line="0" end_line="0" />
+ <duplications_index project_snapshot_id="7" snapshot_id="8" hash="aa" index_in_file="0" start_line="0" end_line="0" />
<!-- New snapshot of current project -->
<!-- foo -->
- <duplications_index id="5" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="0" start_line="0" end_line="0" />
+ <duplications_index project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="0" start_line="0" end_line="0" />
<!-- Note that there is two blocks with same hash for current analysis to verify that we use "SELECT DISTINCT", -->
<!-- without "DISTINCT" we will select block from "bar-last" two times. -->
- <duplications_index id="6" project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="1" start_line="1" end_line="1" />
+ <duplications_index project_snapshot_id="9" snapshot_id="10" hash="aa" index_in_file="1" start_line="1" end_line="1" />
</dataset>
<snapshots id="2" status="U" islast="0" project_id="1" />
<projects id="1" kee="foo" enabled="1" scope="FIL" qualifier="CLA" />
- <duplications_index id="1" project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="1" end_line="2" />
+ <duplications_index project_snapshot_id="1" snapshot_id="2" hash="bb" index_in_file="0" start_line="1" end_line="2" />
</dataset>
--- /dev/null
+#
+# Sonar, enterprise quality control tool.
+# Copyright (C) 2008-2011 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# Sonar is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# Sonar is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with Sonar; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
+#
+# Sonar 2.13
+#
+class DeleteDuplicationsId < ActiveRecord::Migration
+
+ def self.up
+ begin
+ remove_column('duplications_index', 'id')
+ end
+ end
+
+end