--- /dev/null
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
+#
+# SonarQube 5.1
+#
+# Schema migration (SonarQube 5.1): adds the "src_hash" column to the
+# "file_sources" table. The column stores a hash of the raw source file
+# content so changed files can be detected without re-reading the data blob.
+class AddFileSourcesSrcHash < ActiveRecord::Migration
+
+ def self.up
+ # Nullable on purpose: rows written before this migration have no hash
+ # yet; they are backfilled on the next analysis. Limit 50 matches the
+ # VARCHAR(50) declared in the DDL templates.
+ add_column 'file_sources', :src_hash, :string, :limit => 50, :null => true
+ end
+end
.setFileUuid(fileUuid)
.setData(newData)
.setDataHash(newDataHash)
+ .setSrcHash(inputFile.hash())
.setLineHashes(lineHashesAsMd5Hex(inputFile))
.setCreatedAt(now.getTime())
.setUpdatedAt(now.getTime());
.setData(newData)
.setLineHashes(lineHashesAsMd5Hex(inputFile))
.setDataHash(newDataHash)
+ .setSrcHash(inputFile.hash())
.setUpdatedAt(now.getTime());
mapper.update(previous);
session.commit();
FileUtils.write(sameFile, "changed\ncontent");
DefaultInputFile inputFileNew = new DefaultInputFile(PROJECT_KEY, relativePathSame).setLines(2)
.setAbsolutePath(sameFile.getAbsolutePath())
+ .setHash("123456")
.setLineHashes(new byte[][] {md5("changed"), md5("content")});
when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileNew));
",,,,,,,,,,,,,,,changed\r\n,,,,,,,,,,,,,,,content\r\n");
assertThat(fileSourceDto.getLineHashes()).isEqualTo(md5Hex("changed") + "\n" + md5Hex("content"));
assertThat(fileSourceDto.getDataHash()).isEqualTo("d1a4dd62422639f665a8d80b37c59f8d");
+ assertThat(fileSourceDto.getSrcHash()).isEqualTo("123456");
}
@Test
String relativePathEmpty = "src/empty.java";
DefaultInputFile inputFileEmpty = new DefaultInputFile(PROJECT_KEY, relativePathEmpty)
.setLines(0)
+ .setHash("")
.setLineHashes(new byte[][] {});
when(inputPathCache.all()).thenReturn(Arrays.<InputPath>asList(inputFileEmpty));
data=",,,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
data_hash="0263047cd758c68c27683625f072f010"
+ src_hash="123456"
created_at="1412952242000" updated_at="1412952242000" />
</dataset>
data=",,,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
data_hash="0263047cd758c68c27683625f072f010"
+ src_hash="123456"
created_at="1412952242000" updated_at="1412952242000" />
</dataset>
data=",,,,,,,,,,,,,,,unchanged ,,,,,,,,,,,,,,,content "
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
data_hash="0263047cd758c68c27683625f072f010"
+ src_hash="123456"
created_at="1412952242000" updated_at="1412952242000" />
<file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" data="[null]"
line_hashes="[null]"
+ src_hash=""
data_hash="0" created_at="1414597442000" updated_at="1414597442000" />
</dataset>
*/
public class DatabaseVersion implements BatchComponent, ServerComponent {
- public static final int LAST_VERSION = 765;
+ public static final int LAST_VERSION = 766;
/**
 * List of all the tables.
private String data;
private String lineHashes;
private String dataHash;
+ private String srcHash;
public Long getId() {
return id;
return this;
}
+ /**
+ * Hash of the raw source file content, used to detect whether a file
+ * changed since the last analysis. May be {@code null} for rows created
+ * before the src_hash column existed (pre-5.1 schema).
+ */
+ public String getSrcHash() {
+ return srcHash;
+ }
+
+ /**
+ * Sets the source content hash (max 50 chars, matching the column size).
+ *
+ * @return this, for call chaining
+ */
+ public FileSourceDto setSrcHash(String srcHash) {
+ this.srcHash = srcHash;
+ return this;
+ }
+
public long getCreatedAt() {
return createdAt;
}
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('763');
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('764');
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('765');
+INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('766');
INSERT INTO USERS(ID, LOGIN, NAME, EMAIL, CRYPTED_PASSWORD, SALT, CREATED_AT, UPDATED_AT, REMEMBER_TOKEN, REMEMBER_TOKEN_EXPIRES_AT) VALUES (1, 'admin', 'Administrator', '', 'a373a0e667abb2604c1fd571eb4ad47fe8cc0878', '48bc4b0d93179b5103fd3885ea9119498e9d161b', '1418215735482', '1418215735482', null, null);
ALTER TABLE USERS ALTER COLUMN ID RESTART WITH 2;
"DATA" CLOB(2147483647),
"LINE_HASHES" CLOB(2147483647),
"DATA_HASH" VARCHAR(50) NOT NULL,
+ "SRC_HASH" VARCHAR(50) NULL,
"CREATED_AT" BIGINT NOT NULL,
"UPDATED_AT" BIGINT NOT NULL
);
<mapper namespace="org.sonar.core.source.db.FileSourceMapper">
<select id="select" parameterType="string" resultType="org.sonar.core.source.db.FileSourceDto">
- SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt, data, line_hashes as lineHashes, data_hash as dataHash
+ SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt, data, line_hashes as lineHashes, data_hash as dataHash, src_hash as srcHash
FROM file_sources
WHERE file_uuid = #{fileUuid}
</select>
</select>
<insert id="insert" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
- insert into file_sources (project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash)
- values (#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT}, #{updatedAt,jdbcType=BIGINT}, #{data,jdbcType=CLOB}, #{lineHashes,jdbcType=CLOB}, #{dataHash,jdbcType=VARCHAR})
+ insert into file_sources (project_uuid, file_uuid, created_at, updated_at, data, line_hashes, data_hash, src_hash)
+ values (#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT}, #{updatedAt,jdbcType=BIGINT}, #{data,jdbcType=CLOB}, #{lineHashes,jdbcType=CLOB}, #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR})
</insert>
<update id="update" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
updated_at = #{updatedAt},
data = #{data},
line_hashes = #{lineHashes},
- data_hash = #{dataHash}
+ data_hash = #{dataHash},
+ src_hash = #{srcHash}
where id = #{id}
</update>
dao.insert(new FileSourceDto().setProjectUuid("prj").setFileUuid("file").setData("bla bla")
.setDataHash("hash2")
.setLineHashes("foo\nbar")
+ .setSrcHash("hache")
.setCreatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime())
.setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
dao.update(new FileSourceDto().setId(101L).setProjectUuid("prj").setFileUuid("file")
.setData("updated data")
.setDataHash("hash2")
+ .setSrcHash("123456")
.setLineHashes("foo2\nbar2")
.setUpdatedAt(DateUtils.parseDateTime("2014-10-31T16:44:02+0100").getTime()));
<dataset>
<file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" data="[null]" line_hashes="[null]" data_hash="321654988"
- created_at="123456789" updated_at="123456789"/>
+ created_at="123456789" updated_at="123456789" src_hash="123456"/>
</dataset>
build_date="2008-12-02 13:58:00.00" version="[null]" path="[null]"/>
<file_sources id="1" project_uuid="ABCD" file_uuid="GHIJ" data="[null]" line_hashes="[null]" data_hash="321654987"
- created_at="123456789" updated_at="123456789"/>
+ created_at="123456789" updated_at="123456789" src_hash="12345"/>
<file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" data="[null]" line_hashes="[null]" data_hash="321654988"
- created_at="123456789" updated_at="123456789"/>
+ created_at="123456789" updated_at="123456789" src_hash="123456"/>
</dataset>
<file_sources id="101" project_uuid="abcd" file_uuid="ab12"
data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo" data_hash="hash"
line_hashes="truc"
+ src_hash="12345"
created_at="1414597442000" updated_at="1414683842000" />
<file_sources id="102" project_uuid="prj" file_uuid="file"
data="bla bla" data_hash="hash2"
line_hashes="foo bar"
+ src_hash="hache"
created_at="1414770242000" updated_at="1414770242000" />
</dataset>
<file_sources id="101" project_uuid="abcd" file_uuid="ab12"
data="aef12a,alice,2014-04-25T12:34:56+0100,,class Foo" data_hash="hash"
line_hashes="truc"
+ src_hash="12345"
created_at="1414597442000" updated_at="1414683842000" />
</dataset>
<file_sources id="101" project_uuid="abcd" file_uuid="ab12"
data="updated data" data_hash="hash2"
line_hashes="foo2 bar2"
+ src_hash="123456"
created_at="1414597442000" updated_at="1414770242000" />