// Builds the ordered, immutable list of computation steps, each resolved from the
// pico container. Order matters: steps are executed in the sequence registered here.
// NOTE(review): this span contains an unresolved diff hunk — both the old ('-',
// trailing-comment style) and new ('+', comment-above style) variants of the same
// registrations are present. Resolve to the '+' lines only before compiling.
public List<ComputationStep> steps() {
List<ComputationStep> steps = Lists.newArrayList();
- steps.add(pico.getComponentByType(SynchronizeProjectPermissionsStep.class)); // project only
- steps.add(pico.getComponentByType(SwitchSnapshotStep.class)); // project & views
- steps.add(pico.getComponentByType(InvalidatePreviewCacheStep.class)); // project only
- steps.add(pico.getComponentByType(ComponentIndexationInDatabaseStep.class)); // project & views
- steps.add(pico.getComponentByType(DataCleanerStep.class)); // project & views
- steps.add(pico.getComponentByType(IndexProjectIssuesStep.class)); // project only
+ // project only
+ steps.add(pico.getComponentByType(SynchronizeProjectPermissionsStep.class));
+ // project & views
+ steps.add(pico.getComponentByType(SwitchSnapshotStep.class));
+ // project only
+ steps.add(pico.getComponentByType(InvalidatePreviewCacheStep.class));
+ // project & views
+ steps.add(pico.getComponentByType(ComponentIndexationInDatabaseStep.class));
+ // project & views
+ steps.add(pico.getComponentByType(DataCleanerStep.class));
+ // project only
+ steps.add(pico.getComponentByType(IndexProjectIssuesStep.class));
// Defensive immutable snapshot so callers cannot mutate the step sequence.
return ImmutableList.copyOf(steps);
}
Settings settings = projectSettingsFactory.newProjectSettings(projectId, session);
PurgeConfiguration purgeConfiguration = newDefaultPurgeConfiguration(projectId, settings);
- purgeTask.purge(purgeConfiguration, settings, session);
- issueIndex.deleteClosedIssuesOfProjectBefore(project.uuid(), purgeConfiguration.maxLiveDateOfClosedIssues());
+ purgeTask.purge(session, purgeConfiguration, settings);
+
+ if (purgeConfiguration.maxLiveDateOfClosedIssues() != null) {
+ issueIndex.deleteClosedIssuesOfProjectBefore(project.uuid(), purgeConfiguration.maxLiveDateOfClosedIssues());
+ }
}
@Override
}
// Creates the property row that invalidates/refreshes the preview-cache marker for the
// given project: keyed by PreviewCache.SONAR_PREVIEW_CACHE_LAST_UPDATE_KEY, scoped to the
// project's resource id, valued with the current wall-clock time in milliseconds.
// NOTE(review): unresolved diff hunk — the '-' single-line variant and the '+'
// one-call-per-line fluent variant are both present; keep the '+' lines only.
private PropertyDto newProjectPreviewCacheProperty(ComponentDto project) {
- return new PropertyDto().setKey(PreviewCache.SONAR_PREVIEW_CACHE_LAST_UPDATE_KEY).setResourceId(project.getId())
+ return new PropertyDto()
+ .setKey(PreviewCache.SONAR_PREVIEW_CACHE_LAST_UPDATE_KEY)
+ .setResourceId(project.getId())
.setValue(String.valueOf(System.currentTimeMillis()));
}
}
public AnalysisReportDto getNextAvailableReport(DbSession session) {
- // TODO to improve – the query should return one element or null
List<AnalysisReportDto> reports = mapper(session).selectNextAvailableReport(PENDING, WORKING);
if (reports.isEmpty()) {
json.beginObject();
json.prop("id", report.getId());
json.prop("project", report.getProjectKey());
- // TODO give the project name !
json.prop("projectName", report.getProjectKey());
json.propDateTime("startedAt", report.getStartedAt());
json.propDateTime("finishedAt", report.getFinishedAt());
@Override
public void handle(Request request, Response response) {
- /*
- * TODO should be done in a specific service, not logService but maybe something like AnalysisReportHistory ? A Facade Service could be
- * needed
- */
checkUserRights();
ActivityQuery query = logService.newActivityQuery();
--- /dev/null
+/*
+ * SonarQube, open source software quality management tool.
+ * Copyright (C) 2008-2014 SonarSource
+ * mailto:contact AT sonarsource DOT com
+ *
+ * SonarQube is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * SonarQube is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+@ParametersAreNonnullByDefault
+package org.sonar.server.properties;
+
+import javax.annotation.ParametersAreNonnullByDefault;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.*;
public class DataCleanerStepTest {
this.settings = mock(ProjectSettings.class);
this.projectSettingsFactory = mock(ProjectSettingsFactory.class);
when(projectSettingsFactory.newProjectSettings(anyLong(), any(DbSession.class))).thenReturn(settings);
+ when(settings.getInt(any(String.class))).thenReturn(123);
this.sut = new DataCleanerStep(projectSettingsFactory, purgeTask, issueIndex);
}
sut.execute(mock(DbSession.class), report, project);
verify(projectSettingsFactory).newProjectSettings(anyLong(), any(DbSession.class));
- verify(purgeTask).purge(any(PurgeConfiguration.class), any(Settings.class), any(DbSession.class));
+ verify(purgeTask).purge(any(DbSession.class), any(PurgeConfiguration.class), any(Settings.class));
verify(issueIndex).deleteClosedIssuesOfProjectBefore(anyString(), any(Date.class));
}
}
this.profiler = profiler;
}
- public ProjectPurgeTask purge(PurgeConfiguration configuration, Settings settings, DbSession session) {
+ public ProjectPurgeTask purge(DbSession session, PurgeConfiguration configuration, Settings settings) {
long start = System.currentTimeMillis();
profiler.reset();
- cleanHistoricalData(configuration.rootProjectId(), settings, session);
- doPurge(configuration, session);
+ cleanHistoricalData(session, configuration.rootProjectId(), settings);
+ doPurge(session, configuration);
if (settings.getBoolean(CoreProperties.PROFILING_LOG_PROPERTY)) {
long duration = System.currentTimeMillis() - start;
LOG.info("\n -------- Profiling for purge: " + TimeUtils.formatDuration(duration) + " --------\n");
return this;
}
// Applies the history-retention (period cleaning) policy to the given root resource by
// delegating to periodCleaner. Any exception is caught and logged so that history
// cleaning can never fail the surrounding batch run.
// NOTE(review): unresolved diff hunk — '-' lines show the old (resourceId, settings,
// session) parameter order, '+' lines the new session-first order used elsewhere in
// this changeset; keep the '+' lines only.
- private void cleanHistoricalData(long resourceId, Settings settings, DbSession session) {
+ private void cleanHistoricalData(DbSession session, long resourceId, Settings settings) {
try {
- periodCleaner.clean(resourceId, settings, session);
+ periodCleaner.clean(session, resourceId, settings);
} catch (Exception e) {
// purge errors must not fail the batch
LOG.error("Fail to clean historical data [id=" + resourceId + "]", e);
}
}
// Runs the actual database purge via purgeDao for the configured root project. Any
// exception is caught and logged so that a purge failure can never abort the report
// analysis in progress.
// NOTE(review): unresolved diff hunk — '-'/'+' pairs reflect the session-first
// parameter reordering applied across this changeset; keep the '+' lines only.
- private void doPurge(PurgeConfiguration configuration, DbSession session) {
+ private void doPurge(DbSession session, PurgeConfiguration configuration) {
try {
- purgeDao.purge(configuration, session);
+ purgeDao.purge(session, configuration);
} catch (Exception e) {
// purge errors must not fail the report analysis
LOG.error("Fail to purge data [id=" + configuration.rootProjectId() + "]", e);
}
}
// Public entry point for period cleaning: derives the full filter set from the project's
// settings and delegates to doClean for the given project.
// NOTE(review): unresolved diff hunk — keep the '+' session-first signature only; note
// the body still passes `session` last to doClean, which is a private callee.
- public void clean(long projectId, Settings settings, DbSession session) {
+ public void clean(DbSession session, long projectId, Settings settings) {
doClean(projectId, new Filters(settings).all(), session);
}
public PurgeDao purge(PurgeConfiguration conf) {
DbSession session = mybatis.openSession(true);
try {
- purge(conf, session);
+ purge(session, conf);
session.commit();
} finally {
MyBatis.closeQuietly(session);
return this;
}
- public void purge(PurgeConfiguration conf, DbSession session) {
+ public void purge(DbSession session, PurgeConfiguration conf) {
PurgeMapper mapper = session.getMapper(PurgeMapper.class);
PurgeCommands commands = new PurgeCommands(session, mapper, profiler);
List<ResourceDto> projects = getProjects(conf.rootProjectId(), session);
Settings settings = mock(Settings.class);
when(settings.getBoolean(CoreProperties.PROFILING_LOG_PROPERTY)).thenReturn(false);
- sut.purge(mock(PurgeConfiguration.class), settings, mock(DbSession.class));
+ sut.purge(mock(DbSession.class), mock(PurgeConfiguration.class), settings);
verify(profiler, never()).dump(anyLong(), any(Logger.class));
}
Settings settings = mock(Settings.class);
when(settings.getBoolean(CoreProperties.PROFILING_LOG_PROPERTY)).thenReturn(true);
- sut.purge(mock(PurgeConfiguration.class), settings, mock(DbSession.class));
+ sut.purge(mock(DbSession.class), mock(PurgeConfiguration.class), settings);
verify(profiler, times(1)).dump(anyLong(), any(Logger.class));
}
// Verifies the swallow-and-log contract of ProjectPurgeTask.doPurge: when PurgeDao.purge
// throws (here a stubbed NullPointerException), sut.purge must complete normally, and the
// dao must still have been invoked exactly once.
// NOTE(review): unresolved diff hunk — '-'/'+' pairs mirror the session-first parameter
// reordering of purge(); keep the '+' lines only.
public void if_dao_purge_fails_it_should_not_interrupt_program_execution() throws Exception {
when(dao.purge(any(PurgeConfiguration.class))).thenThrow(NullPointerException.class);
- sut.purge(mock(PurgeConfiguration.class), mock(Settings.class), mock(DbSession.class));
+ sut.purge(mock(DbSession.class), mock(PurgeConfiguration.class), mock(Settings.class));
- verify(dao, times(1)).purge(any(PurgeConfiguration.class), any(DbSession.class));
+ verify(dao, times(1)).purge(any(DbSession.class), any(PurgeConfiguration.class));
}
// Verifies the swallow-and-log contract of ProjectPurgeTask.cleanHistoricalData: when
// periodCleaner.clean throws (stubbed NullPointerException), sut.purge must complete
// normally, and the cleaner must still have been invoked exactly once.
// NOTE(review): unresolved diff hunk — keep the '+' lines only. Also note the doThrow
// stub below still uses the old two-argument matcher signature for clean(); it likely
// needs the same session-first update as the verify line — TODO confirm against the
// PeriodCleaner mock's actual overloads.
@Test
public void if_profiler_cleaning_fails_it_should_not_interrupt_program_execution() throws Exception {
doThrow(NullPointerException.class).when(periodCleaner).clean(anyLong(), any(Settings.class));
- sut.purge(mock(PurgeConfiguration.class), mock(Settings.class), mock(DbSession.class));
+ sut.purge(mock(DbSession.class), mock(PurgeConfiguration.class), mock(Settings.class));
- verify(periodCleaner, times(1)).clean(anyLong(), any(Settings.class), any(DbSession.class));
+ verify(periodCleaner, times(1)).clean(any(DbSession.class), anyLong(), any(Settings.class));
}
}