/* * SonarQube * Copyright (C) 2009-2025 SonarSource SA * mailto:info AT sonarsource DOT com * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package org.sonar.db.source; import com.google.common.base.Joiner; import com.google.common.base.Splitter; import com.google.protobuf.CodedInputStream; import com.google.protobuf.InvalidProtocolBufferException; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Collections; import java.util.List; import javax.annotation.Nullable; import net.jpountz.lz4.LZ4BlockInputStream; import net.jpountz.lz4.LZ4BlockOutputStream; import org.apache.commons.io.IOUtils; import org.sonar.db.protobuf.DbFileSources; import static com.google.common.base.Splitter.on; import static java.lang.String.format; public class FileSourceDto extends FileHashesDto { private static final String SIZE_LIMIT_EXCEEDED_EXCEPTION_MESSAGE = "Protocol message was too large. May be malicious. 
" + "Use CodedInputStream.setSizeLimit() to increase the size limit."; private static final Joiner LINE_RETURN_JOINER = Joiner.on('\n'); public static final Splitter LINES_HASHES_SPLITTER = on('\n'); public static final int LINE_COUNT_NOT_POPULATED = -1; private String projectUuid; private long createdAt; private String lineHashes; /** * When {@code line_count} column has been added, it's been populated with value {@link #LINE_COUNT_NOT_POPULATED -1}, * which implies all existing files sources have this value at the time SonarQube is upgraded. *
* Column {@code line_count} is populated with the correct value from every new files and for existing files as the * project they belong to is analyzed for the first time after the migration. *
* Method {@link #getLineCount()} hides this migration-only-related complexity by either returning the value
* of column {@code line_count} when its been populated, or computed the returned value from the value of column
* {@code line_hashes}.
*/
// Number of lines in the file. Stays at LINE_COUNT_NOT_POPULATED (-1) for rows created
// before the line_count column migration; see getLineCount() per the class javadoc.
private int lineCount = LINE_COUNT_NOT_POPULATED;
// LZ4-compressed serialized DbFileSources.Data protobuf message (column BINARY_DATA).
private byte[] binaryData = new byte[0];
/**
 * Sets the line hashes version.
 *
 * @return {@code this} for call chaining
 */
public FileSourceDto setLineHashesVersion(int version) {
  this.lineHashesVersion = version;
  return this;
}
/**
 * Sets the uuid of this row.
 *
 * @return {@code this} for call chaining
 */
public FileSourceDto setUuid(String newUuid) {
  this.uuid = newUuid;
  return this;
}
/**
 * @return the uuid of the project this file source belongs to
 */
public String getProjectUuid() {
  return this.projectUuid;
}
/**
 * Sets the uuid of the owning project.
 *
 * @return {@code this} for call chaining
 */
public FileSourceDto setProjectUuid(String newProjectUuid) {
  this.projectUuid = newProjectUuid;
  return this;
}
/**
 * Sets the uuid of the file this source data belongs to.
 *
 * @return {@code this} for call chaining
 */
@Override
public FileSourceDto setFileUuid(String newFileUuid) {
  this.fileUuid = newFileUuid;
  return this;
}
/**
 * Sets the MD5 hash of column BINARY_DATA, used to detect data changes and
 * decide whether an update is needed.
 *
 * @return {@code this} for call chaining
 */
@Override
public FileSourceDto setDataHash(String hash) {
  this.dataHash = hash;
  return this;
}
/**
 * Decompresses and deserializes the given BINARY_DATA column value into a
 * {@link DbFileSources.Data} protobuf message.
 *
 * @throws IllegalStateException if decompression or deserialization fails, with
 *         the row's uuid, fileUuid and projectUuid included in the message
 */
public DbFileSources.Data decodeSourceData(byte[] binaryData) {
  try {
    return decodeRegularSourceData(binaryData);
  } catch (IOException e) {
    String message = format("Fail to decompress and deserialize source data [uuid=%s,fileUuid=%s,projectUuid=%s]", uuid, fileUuid, projectUuid);
    throw new IllegalStateException(message, e);
  }
}
/**
 * Decompresses the LZ4 payload and parses it as a {@link DbFileSources.Data} message.
 * When protobuf rejects the message for exceeding its default size limit (detected by
 * matching the exception message text), retries via {@link #decodeHugeSourceData(byte[])}
 * with the limit raised.
 */
private static DbFileSources.Data decodeRegularSourceData(byte[] binaryData) throws IOException {
  try (LZ4BlockInputStream lz4Input = new LZ4BlockInputStream(new ByteArrayInputStream(binaryData))) {
    return DbFileSources.Data.parseFrom(lz4Input);
  } catch (InvalidProtocolBufferException e) {
    if (!SIZE_LIMIT_EXCEEDED_EXCEPTION_MESSAGE.equals(e.getMessage())) {
      throw e;
    }
    return decodeHugeSourceData(binaryData);
  }
}
/**
 * Fallback decoder for payloads larger than protobuf's default size limit:
 * parses through a {@link CodedInputStream} whose limit is raised to {@link Integer#MAX_VALUE}.
 */
private static DbFileSources.Data decodeHugeSourceData(byte[] binaryData) throws IOException {
  try (LZ4BlockInputStream decompressed = new LZ4BlockInputStream(new ByteArrayInputStream(binaryData))) {
    CodedInputStream codedInput = CodedInputStream.newInstance(decompressed);
    codedInput.setSizeLimit(Integer.MAX_VALUE);
    return DbFileSources.Data.parseFrom(codedInput);
  }
}
/**
 * Serialize and compress protobuf message {@link org.sonar.db.protobuf.DbFileSources.Data}
 * in the column BINARY_DATA.
 *
 * @return the LZ4-compressed bytes of the serialized message
 * @throws IllegalStateException if serialization or compression fails
 */
public static byte[] encodeSourceData(DbFileSources.Data data) {
  ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
  // try-with-resources closes (and thereby flushes) the LZ4 stream exactly once,
  // before toByteArray() is called. The previous implementation closed the stream
  // manually inside the try AND again via IOUtils.closeQuietly in a finally block.
  try (LZ4BlockOutputStream compressedOutput = new LZ4BlockOutputStream(byteOutput)) {
    data.writeTo(compressedOutput);
  } catch (IOException e) {
    throw new IllegalStateException("Fail to serialize and compress source data", e);
  }
  return byteOutput.toByteArray();
}
/**
 * Compressed value of the serialized protobuf message {@link org.sonar.db.protobuf.DbFileSources.Data}.
 * Note: returns the internal array without copying.
 */
public byte[] getBinaryData() {
  return this.binaryData;
}
/**
 * Sets the compressed value of the protobuf message {@link org.sonar.db.protobuf.DbFileSources.Data}.
 *
 * @return {@code this} for call chaining
 */
public FileSourceDto setBinaryData(byte[] compressedData) {
  this.binaryData = compressedData;
  return this;
}
/**
 * Decompressed value of the serialized protobuf message {@link org.sonar.db.protobuf.DbFileSources.Data}.
 *
 * @throws IllegalStateException if the stored binary data cannot be decoded
 */
public DbFileSources.Data getSourceData() {
  return decodeSourceData(this.binaryData);
}
/**
 * Serializes and compresses the given message into the BINARY_DATA value.
 *
 * @return {@code this} for call chaining
 */
public FileSourceDto setSourceData(DbFileSources.Data sourceData) {
  this.binaryData = encodeSourceData(sourceData);
  return this;
}
/**
 * Raw value of the {@code line_hashes} column. Used by MyBatis.
 */
public String getRawLineHashes() {
  return this.lineHashes;
}
/**
 * Sets the raw value of the {@code line_hashes} column. Used by MyBatis.
 */
public void setRawLineHashes(@Nullable String rawLineHashes) {
  this.lineHashes = rawLineHashes;
}
public List