import org.sonar.server.db.migrations.v52.DropDependenciesComponentColumns;
import org.sonar.server.db.migrations.v52.FeedDependenciesComponentUuids;
import org.sonar.server.db.migrations.v52.FeedEventsComponentUuid;
+import org.sonar.server.db.migrations.v52.FeedFileSourcesDataType;
import org.sonar.server.db.migrations.v52.FeedProjectLinksComponentUuid;
import org.sonar.server.db.migrations.v52.MoveProjectProfileAssociation;
MoveProjectProfileAssociation.class,
AddDependenciesComponentUuidColumns.class,
FeedDependenciesComponentUuids.class,
- DropDependenciesComponentColumns.class
+ DropDependenciesComponentColumns.class,
+ FeedFileSourcesDataType.class
);
}
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+package org.sonar.server.db.migrations.v52;
+
+import org.sonar.core.persistence.Database;
+import org.sonar.server.db.migrations.BaseDataChange;
+
+import java.sql.SQLException;
+
+/**
+ * SonarQube 5.2 / SONAR-6255 migration step.
+ * Populates the newly added {@code file_sources.data_type} column by setting it to
+ * 'SOURCE' on every row. The update is unconditional (no WHERE clause), so any
+ * pre-existing value is overwritten as well.
+ */
+public class FeedFileSourcesDataType extends BaseDataChange {
+
+  public FeedFileSourcesDataType(Database db) {
+    super(db);
+  }
+
+  @Override
+  public void execute(Context context) throws SQLException {
+    // Single bulk statement: every existing file_sources row is source data pre-5.2.
+    context.prepareUpsert("update file_sources set data_type = 'SOURCE'").execute().commit();
+  }
+}
import javax.annotation.CheckForNull;
import javax.annotation.Nullable;
-import java.util.*;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
public class EsUtils {
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+package org.sonar.server.db.migrations.v52;
+
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.sonar.core.persistence.DbTester;
+import org.sonar.server.db.migrations.MigrationStep;
+
+/**
+ * Tests for {@link FeedFileSourcesDataType}: verifies the migration runs on an
+ * empty table and that it fills {@code data_type} on existing rows
+ * (expected state described by migrate-result.xml).
+ */
+public class FeedFileSourcesDataTypeTest {
+
+  @ClassRule
+  public static DbTester db = new DbTester().schema(FeedFileSourcesDataTypeTest.class, "schema.sql");
+
+  // Step under test, recreated before each test against a clean table.
+  MigrationStep migration;
+
+  @Before
+  public void setUp() throws Exception {
+    // Ensure no leftover rows from a previous test method.
+    db.executeUpdateSql("truncate table file_sources");
+
+    migration = new FeedFileSourcesDataType(db.database());
+  }
+
+  @Test
+  public void migrate_empty_db() throws Exception {
+    // Must not fail when there is nothing to update.
+    migration.execute();
+  }
+
+  @Test
+  public void migrate() throws Exception {
+    // migrate.xml contains rows with NULL data_type; all must end up as 'SOURCE'.
+    db.prepareDbUnit(this.getClass(), "migrate.xml");
+    migration.execute();
+    db.assertDbUnit(this.getClass(), "migrate-result.xml", "file_sources");
+  }
+}
assertThat(fileSourceDto.getFileUuid()).isEqualTo("FILE1_UUID");
assertThat(fileSourceDto.getCreatedAt()).isEqualTo(1500000000000L);
assertThat(fileSourceDto.getUpdatedAt()).isEqualTo(1500000000000L);
+ assertThat(fileSourceDto.getDataType()).isEqualTo(FileSourceDto.Type.SOURCE);
}
@Test
.setDataHash("FILE2_DATA_HASH")
.setLineHashes("LINE1_HASH\\nLINE2_HASH")
.setSrcHash("FILE2_HASH")
+ .setDataType(FileSourceDto.Type.SOURCE)
.setCreatedAt(1500000000000L)
.setUpdatedAt(1500000000001L));
- checkTable("insert", "file_sources", "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at");
+ checkTable("insert", "file_sources", "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type");
}
@Test
public void update() throws Exception {
setupData("shared");
- dao.update(new FileSourceDto().setId(101L)
+ dao.update(new FileSourceDto()
+ .setId(101L)
.setProjectUuid("PRJ_UUID")
.setFileUuid("FILE1_UUID")
.setBinaryData("updated data".getBytes())
.setDataHash("NEW_DATA_HASH")
.setSrcHash("NEW_FILE_HASH")
.setLineHashes("NEW_LINE_HASHES")
+ .setDataType(FileSourceDto.Type.SOURCE)
.setUpdatedAt(1500000000002L));
- checkTable("update", "file_sources", "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at");
+ checkTable("update", "file_sources", "project_uuid", "file_uuid", "data_hash", "line_hashes", "src_hash", "created_at", "updated_at", "data_type");
}
@Test
line_hashes="lineEFGHI"
data_hash="dataEFGHI"
src_hash="srcEFGHI"
- created_at="1412952242000" updated_at="1412952242000"/>
+ created_at="1412952242000" updated_at="1412952242000" data_type="SOURCE" />
<!-- sub module -->
<projects id="4" root_id="1" kee="org.struts:struts-data" name="Struts Data"
line_hashes="lineHIJK"
data_hash="dataHIJK"
src_hash="srcHIJK"
- created_at="1412952242000" updated_at="1412952242000"/>
+ created_at="1412952242000" updated_at="1412952242000" data_type="SOURCE" />
</dataset>
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
data_hash="0263047cd758c68c27683625f072f010"
src_hash="123456"
- created_at="1412952242000" updated_at="1412952242000"/>
+ created_at="1412952242000" updated_at="1412952242000" data_type="SOURCE" />
</dataset>
<file_sources id="101" project_uuid="ABCD" file_uuid="FILE1_UUID"
binary_data="abcde" data_hash="hash" line_hashes="ABC\nDEF\nGHI" src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="0"/>
+ created_at="1500000000000" updated_at="0" data_type="SOURCE" />
<file_sources id="102" project_uuid="DCBA" file_uuid="FILE2_UUID"
binary_data="edcba" data_hash="hash2" line_hashes="CBA\nFDE\nIHG" src_hash="FILE2_HASH"
- created_at="1500000000000" updated_at="0"/>
+ created_at="1500000000000" updated_at="0" data_type="SOURCE" />
</dataset>
--- /dev/null
+<dataset>
+
+ <file_sources id="1" project_uuid="project-1" file_uuid="file-1" data_type="SOURCE" created_at="123456789"
+ updated_at="456456456"/>
+ <file_sources id="2" project_uuid="project-2" file_uuid="file-2" data_type="SOURCE" created_at="123456789"
+ updated_at="456456456"/>
+ <file_sources id="3" project_uuid="project-3" file_uuid="file-3" data_type="SOURCE" created_at="123456789"
+ updated_at="456456456"/>
+
+</dataset>
--- /dev/null
+<dataset>
+
+ <file_sources id="1" project_uuid="project-1" file_uuid="file-1" data_type="[null]" created_at="123456789" updated_at="456456456" />
+ <file_sources id="2" project_uuid="project-2" file_uuid="file-2" data_type="SOURCE" created_at="123456789" updated_at="456456456" />
+ <file_sources id="3" project_uuid="project-3" file_uuid="file-3" data_type="[null]" created_at="123456789" updated_at="456456456" />
+
+</dataset>
--- /dev/null
+-- Minimal FILE_SOURCES schema for FeedFileSourcesDataTypeTest: only the columns
+-- the migration step touches. DATA_TYPE is nullable, matching its state right
+-- after the add_column migration and before the feed step runs.
+CREATE TABLE "FILE_SOURCES" (
+  "ID" INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 1),
+  "PROJECT_UUID" VARCHAR(50) NOT NULL,
+  "FILE_UUID" VARCHAR(50) NOT NULL,
+  "DATA_TYPE" VARCHAR(20),
+  "CREATED_AT" BIGINT NOT NULL,
+  "UPDATED_AT" BIGINT NOT NULL
+);
binary_data="abcde" data_hash="hash"
line_hashes="ABC\nDEF\nGHI"
src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="1500000000000" />
+ created_at="1500000000000" updated_at="1500000000000" data_type="SOURCE" />
<file_sources id="102" project_uuid="PRJ_UUID" file_uuid="FILE2_UUID"
data_hash="FILE2_DATA_HASH"
line_hashes="LINE1_HASH\nLINE2_HASH"
src_hash="FILE2_HASH"
- created_at="1500000000000" updated_at="1500000000001" />
+ created_at="1500000000000" updated_at="1500000000001" data_type="SOURCE" />
</dataset>
binary_data="abcde" data_hash="hash"
line_hashes="ABC\nDEF\nGHI"
src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="1500000000000" />
+ created_at="1500000000000" updated_at="1500000000000" data_type="SOURCE" />
</dataset>
data_hash="NEW_DATA_HASH"
line_hashes="NEW_LINE_HASHES"
src_hash="NEW_FILE_HASH"
- created_at="1500000000000" updated_at="1500000000002" />
+ created_at="1500000000000" updated_at="1500000000002" data_type="SOURCE" />
</dataset>
<!-- Updated -->
<file_sources id="101" project_uuid="ABCD" file_uuid="FILE1_UUID"
binary_data="abcde" data_hash="hash" line_hashes="ABC\nDEF\nGHI" src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="1500000000002"/>
+ created_at="1500000000000" updated_at="1500000000002" data_type="SOURCE" />
<!-- Not updated because updated_at is not null -->
<file_sources id="102" project_uuid="ABCD" file_uuid="FILE2_UUID"
binary_data="abcde" data_hash="hash" line_hashes="ABC\nDEF\nGHI" src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="1500000000000"/>
+ created_at="1500000000000" updated_at="1500000000000" data_type="SOURCE" />
<!-- Not updated because on another project -->
<file_sources id="103" project_uuid="BCDE" file_uuid="FILE3_UUID"
binary_data="abcde" data_hash="hash" line_hashes="ABC\nDEF\nGHI" src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="0"/>
+ created_at="1500000000000" updated_at="0" data_type="SOURCE" />
</dataset>
<!-- Only this source should be updated -->
<file_sources id="101" project_uuid="ABCD" file_uuid="FILE1_UUID"
binary_data="abcde" data_hash="hash" line_hashes="ABC\nDEF\nGHI" src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="0"/>
+ created_at="1500000000000" updated_at="0" data_type="SOURCE" />
<file_sources id="102" project_uuid="ABCD" file_uuid="FILE2_UUID"
binary_data="abcde" data_hash="hash" line_hashes="ABC\nDEF\nGHI" src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="1500000000000"/>
+ created_at="1500000000000" updated_at="1500000000000" data_type="SOURCE" />
<file_sources id="103" project_uuid="BCDE" file_uuid="FILE3_UUID"
binary_data="abcde" data_hash="hash" line_hashes="ABC\nDEF\nGHI" src_hash="FILE_HASH"
- created_at="1500000000000" updated_at="0"/>
+ created_at="1500000000000" updated_at="0" data_type="SOURCE" />
</dataset>
<dataset>
<file_sources id="1" project_uuid="P1" file_uuid="F1" created_at="1416238020000" updated_at="1416239042000"
- binary_data="" data_hash="" />
+ binary_data="" data_hash="" data_type="SOURCE" />
<file_sources id="2" project_uuid="P2" file_uuid="F2" created_at="1416238020000" updated_at="1416239042000"
- binary_data="" data_hash="" />
+ binary_data="" data_hash="" data_type="SOURCE" />
</dataset>
<dataset>
<file_sources id="1" project_uuid="P1" file_uuid="F1" created_at="1416238020000" updated_at="1416239042000"
- binary_data="" data_hash="" />
+ binary_data="" data_hash="" data_type="SOURCE" />
<file_sources id="2" project_uuid="P1" file_uuid="F2" created_at="1416238020000" updated_at="1300000000000"
- binary_data="" data_hash="" />
+ binary_data="" data_hash="" data_type="SOURCE" />
</dataset>
"FILE_UUID" VARCHAR(50) NOT NULL,
"BINARY_DATA" BINARY(167772150),
"DATA_HASH" VARCHAR(50) NOT NULL,
+ "DATA_TYPE" VARCHAR(50),
"CREATED_AT" BIGINT NOT NULL,
"UPDATED_AT" BIGINT NOT NULL
);
<dataset>
<file_sources id="1" project_uuid="P1" file_uuid="F1" created_at="1416238020000" updated_at="1416239042000"
- binary_data="" data_hash="" />
+ binary_data="" data_hash="" data_type="SOURCE" />
</dataset>
<dataset>
<file_sources id="1" project_uuid="PROJECT_UUID" file_uuid="FILE_UUID" created_at="1416238020000" updated_at="1416239042000"
- binary_data="" data_hash="DATA_HASH" />
+ binary_data="" data_hash="DATA_HASH" data_type="SOURCE" />
</dataset>
binary_data="" data_hash="hash"
line_hashes="987654"
src_hash="12345"
- created_at="1414597442000" updated_at="1414683842000"/>
+ created_at="1414597442000" updated_at="1414683842000" data_type="SOURCE" />
</dataset>
--- /dev/null
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
+#
+# SonarQube 5.2
+# SONAR-6255
+#
+# Adds the nullable file_sources.data_type column (max 20 chars) and replaces the
+# single-column unique index on file_uuid with a composite unique index on
+# (file_uuid, data_type), allowing one SOURCE and one TEST row per file.
+# NOTE(review): the new index is named 'file_sources_uuid_type' here while the H2
+# test schema declares "FILE_SOURCES_UUID_TYPE_UNIQUE" -- confirm the intended name.
+class AddFileSourcesDataType < ActiveRecord::Migration
+
+  def self.up
+    add_column 'file_sources', 'data_type', :string, :limit => 20
+    remove_index_quietly('file_sources_file_uuid_uniq')
+    add_index 'file_sources', ['file_uuid', 'data_type'], :name => 'file_sources_uuid_type', :unique => true
+  end
+
+  # Drops the named index on file_sources, ignoring failures: some upgrade paths
+  # (or fresh installs) may not have the old index, and that is not an error here.
+  def self.remove_index_quietly(name)
+    begin
+      remove_index('file_sources', :name => name)
+    rescue
+      # probably already removed
+    end
+  end
+end
--- /dev/null
+#
+# SonarQube, open source software quality management tool.
+# Copyright (C) 2008-2014 SonarSource
+# mailto:contact AT sonarsource DOT com
+#
+# SonarQube is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# SonarQube is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
+#
+# SonarQube 5.2
+# SONAR-6255
+#
+# Ruby shim that delegates to the Java migration step of the same name, which
+# fills file_sources.data_type with 'SOURCE' for all existing rows.
+class FeedFileSourcesDataType < ActiveRecord::Migration
+
+  def self.up
+    execute_java_migration('org.sonar.server.db.migrations.v52.FeedFileSourcesDataType')
+  end
+
+end
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
data_hash="0263047cd758c68c27683625f072f010"
src_hash="[null]"
- created_at="1412952242000" updated_at="1412952242000"/>
+ created_at="1412952242000" updated_at="1412952242000" data_type="SOURCE" />
</dataset>
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
data_hash="0263047cd758c68c27683625f072f010"
src_hash="123456"
- created_at="1412952242000" updated_at="1412952242000"/>
+ created_at="1412952242000" updated_at="1412952242000" data_type="SOURCE" />
</dataset>
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
data_hash="0263047cd758c68c27683625f072f010"
src_hash="123456"
- created_at="1412952242000" updated_at="1412952242000"/>
+ created_at="1412952242000" updated_at="1412952242000" data_type="SOURCE" />
</dataset>
line_hashes="8d7b3d6b83c0a517eac07e1aac94b773 9a0364b9e99bb480dd25e1f0284c8555"
data_hash="0263047cd758c68c27683625f072f010"
src_hash="123456"
- created_at="1412952242000" updated_at="1412952242000"/>
+ created_at="1412952242000" updated_at="1412952242000" data_type="SOURCE" />
<file_sources id="102" project_uuid="projectUuid" file_uuid="uuidempty" binary_data="[null]"
line_hashes="[null]"
src_hash="abcd"
- data_hash="0" created_at="1414597442000" updated_at="1414597442000"/>
+ data_hash="0" created_at="1414597442000" updated_at="1414597442000" data_type="SOURCE" />
</dataset>
*/
public class DatabaseVersion implements BatchComponent, ServerComponent {
- public static final int LAST_VERSION = 911;
+ public static final int LAST_VERSION = 913;
/**
 * List of all the tables.
private String lineHashes;
private String srcHash;
private byte[] binaryData;
+ private String dataType;
private String dataHash;
public Long getId() {
this.updatedAt = updatedAt;
return this;
}
+
+ public String getDataType() {
+ return dataType;
+ }
+
+ public FileSourceDto setDataType(String dataType) {
+ this.dataType = dataType;
+ return this;
+ }
+
+ public static class Type {
+ public final static String SOURCE = "SOURCE";
+ public final static String TEST = "TEST";
+ }
}
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('909');
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('910');
INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('911');
+INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('912');
+INSERT INTO SCHEMA_MIGRATIONS(VERSION) VALUES ('913');
INSERT INTO USERS(ID, LOGIN, NAME, EMAIL, CRYPTED_PASSWORD, SALT, CREATED_AT, UPDATED_AT, REMEMBER_TOKEN, REMEMBER_TOKEN_EXPIRES_AT) VALUES (1, 'admin', 'Administrator', '', 'a373a0e667abb2604c1fd571eb4ad47fe8cc0878', '48bc4b0d93179b5103fd3885ea9119498e9d161b', '1418215735482', '1418215735482', null, null);
ALTER TABLE USERS ALTER COLUMN ID RESTART WITH 2;
"FILE_UUID" VARCHAR(50) NOT NULL,
"LINE_HASHES" CLOB(2147483647),
"BINARY_DATA" BLOB(167772150),
+ "DATA_TYPE" VARCHAR(20),
"DATA_HASH" VARCHAR(50) NOT NULL,
"SRC_HASH" VARCHAR(50) NULL,
"CREATED_AT" BIGINT NOT NULL,
CREATE INDEX "FILE_SOURCES_PROJECT_UUID" ON "FILE_SOURCES" ("PROJECT_UUID");
-CREATE UNIQUE INDEX "FILE_SOURCES_FILE_UUID_UNIQ" ON "FILE_SOURCES" ("FILE_UUID");
+CREATE UNIQUE INDEX "FILE_SOURCES_UUID_TYPE_UNIQUE" ON "FILE_SOURCES" ("FILE_UUID", "DATA_TYPE");
CREATE INDEX "FILE_SOURCES_UPDATED_AT" ON "FILE_SOURCES" ("UPDATED_AT");
<select id="select" parameterType="string" resultType="org.sonar.core.source.db.FileSourceDto">
SELECT id, project_uuid as projectUuid, file_uuid as fileUuid, created_at as createdAt, updated_at as updatedAt,
- binary_data as binaryData, line_hashes as lineHashes, data_hash as dataHash, src_hash as srcHash
+ binary_data as binaryData, line_hashes as lineHashes, data_hash as dataHash, src_hash as srcHash, data_type as dataType
FROM file_sources
WHERE file_uuid = #{fileUuid}
</select>
<select id="selectHashesForProject" parameterType="string" resultType="org.sonar.core.source.db.FileSourceDto">
- SELECT id, file_uuid as fileUuid, data_hash as dataHash, src_hash as srcHash, updated_at as updatedAt
+ SELECT id, file_uuid as fileUuid, data_type as dataType, data_hash as dataHash, src_hash as srcHash, updated_at as updatedAt
FROM file_sources
WHERE project_uuid = #{projectUuid}
</select>
<insert id="insert" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
- INSERT INTO file_sources (project_uuid, file_uuid, created_at, updated_at, binary_data, line_hashes, data_hash, src_hash)
+ INSERT INTO file_sources (project_uuid, file_uuid, created_at, updated_at, binary_data, line_hashes, data_hash, src_hash, data_type)
VALUES (#{projectUuid,jdbcType=VARCHAR}, #{fileUuid,jdbcType=VARCHAR}, #{createdAt,jdbcType=BIGINT},
#{updatedAt,jdbcType=BIGINT}, #{binaryData,jdbcType=BLOB}, #{lineHashes,jdbcType=CLOB},
- #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR})
+ #{dataHash,jdbcType=VARCHAR}, #{srcHash,jdbcType=VARCHAR},#{dataType,jdbcType=VARCHAR})
</insert>
<update id="update" parameterType="org.sonar.core.source.db.FileSourceDto" useGeneratedKeys="false">
binary_data = #{binaryData,jdbcType=BLOB},
line_hashes = #{lineHashes,jdbcType=CLOB},
data_hash = #{dataHash,jdbcType=VARCHAR},
- src_hash = #{srcHash,jdbcType=VARCHAR}
+ src_hash = #{srcHash,jdbcType=VARCHAR},
+ data_type = #{dataType,jdbcType=VARCHAR}
WHERE id = #{id}
</update>
<dataset>
<file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" binary_data="[null]" line_hashes="[null]" data_hash="321654988"
- created_at="123456789" updated_at="123456789" src_hash="123456"/>
+ created_at="123456789" updated_at="123456789" src_hash="123456" data_type="SOURCE" />
</dataset>
build_date="1228222680000" version="[null]" path="[null]"/>
<file_sources id="1" project_uuid="ABCD" file_uuid="GHIJ" binary_data="[null]" line_hashes="[null]" data_hash="321654987"
- created_at="123456789" updated_at="123456789" src_hash="12345"/>
+ created_at="123456789" updated_at="123456789" src_hash="12345" data_type="SOURCE" />
<file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" binary_data="[null]" line_hashes="[null]" data_hash="321654988"
- created_at="123456789" updated_at="123456789" src_hash="123456"/>
+ created_at="123456789" updated_at="123456789" src_hash="123456" data_type="SOURCE" />
</dataset>
build_date="1228222680000" version="[null]" path="[null]"/>
<file_sources id="1" project_uuid="ABCD" file_uuid="GHIJ" binary_data="[null]" line_hashes="[null]" data_hash="321654987"
- created_at="123456789" updated_at="123456789"/>
+ created_at="123456789" updated_at="123456789" data_type="SOURCE" />
<file_sources id="2" project_uuid="ABCD" file_uuid="KLMN" binary_data="[null]" line_hashes="[null]" data_hash="321654988"
- created_at="123456789" updated_at="123456789"/>
+ created_at="123456789" updated_at="123456789" data_type="SOURCE" />
</dataset>
build_date="1228222680000"
version="[null]" path="[null]"/>
<file_sources id="1" project_uuid="A" file_uuid="D" binary_data="[null]" line_hashes="[null]" data_hash="321654987"
- created_at="123456789" updated_at="123456789"/>
+ created_at="123456789" updated_at="123456789" data_type="SOURCE" />
</dataset>