Commit cf8a59cf authored by PECQUOT's avatar PECQUOT
Browse files

Merge branch 'release/3.6.2'

parents 3af3c849 22426e9e
## Sprint 72 - v3.6.2
- Aucune mise à jour de base de données
## Sprint 71 - v3.6.1
- Aucune mise à jour de base de données
......
......@@ -10,7 +10,7 @@
<groupId>fr.ifremer.reefdb</groupId>
<artifactId>reefdb</artifactId>
<version>3.6.1</version>
<version>3.6.2</version>
<packaging>pom</packaging>
<name>Reef DB</name>
......@@ -178,7 +178,7 @@
<maven.compiler.debug>true</maven.compiler.debug>
<!-- Quadrige3 Core version -->
<quadrige3-core.version>3.3.5</quadrige3-core.version>
<quadrige3-core.version>3.3.6</quadrige3-core.version>
<!-- Last ReefDb launcher version -->
<launcherVersion>3.0.3</launcherVersion>
......
......@@ -4,7 +4,7 @@
<parent>
<groupId>fr.ifremer.reefdb</groupId>
<artifactId>reefdb</artifactId>
<version>3.6.1</version>
<version>3.6.2</version>
</parent>
<artifactId>reefdb-core</artifactId>
......
......@@ -45,7 +45,6 @@ import org.hibernate.Query;
import org.hibernate.SessionFactory;
import org.hibernate.type.IntegerType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.Cache;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.stereotype.Repository;
......@@ -86,8 +85,6 @@ public class ReefDbFractionDaoImpl extends FractionDaoImpl implements ReefDbFrac
@Override
public List<FractionDTO> getAllFractions(List<String> statusCodes) {
Cache fractionById = cacheService.getCache(FRACTION_BY_ID_CACHE);
Iterator<Object[]> it = Daos.queryIteratorWithStatus(createQuery("allFractions"), statusCodes);
List<FractionDTO> result = Lists.newArrayList();
......@@ -111,8 +108,6 @@ public class ReefDbFractionDaoImpl extends FractionDaoImpl implements ReefDbFrac
fraction.addMatrixes(matrix);
matrix.addFractions(fraction);
}
fractionById.putIfAbsent(fraction.getId(), fraction);
}
return result;
......
......@@ -45,7 +45,6 @@ import org.hibernate.Query;
import org.hibernate.SessionFactory;
import org.hibernate.type.IntegerType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.Cache;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.stereotype.Repository;
......@@ -86,8 +85,6 @@ public class ReefDbMatrixDaoImpl extends MatrixDaoImpl implements ReefDbMatrixDa
@Override
public List<MatrixDTO> getAllMatrices(List<String> statusCodes) {
Cache cacheById = cacheService.getCache(MATRIX_BY_ID_CACHE);
Iterator<Object[]> it = Daos.queryIteratorWithStatus(createQuery("allMatrices"), statusCodes);
List<MatrixDTO> result = Lists.newArrayList();
......@@ -96,10 +93,7 @@ public class ReefDbMatrixDaoImpl extends MatrixDaoImpl implements ReefDbMatrixDa
MatrixDTO matrix = toMatrixDTO(Arrays.asList(source).iterator());
// add fractions
matrix.addAllFractions(fractionDao.getFractionsByMatrixId(matrix.getId()));
result.add(matrix);
cacheById.put(matrix.getId(), matrix);
}
return result;
......
......@@ -65,6 +65,8 @@ public interface ReefDbExtractionResultDao extends CSVDao {
*/
List<Integer> queryIntegerList(String query, Map<String, Object> queryBindings);
List<String> queryStringList(String query, Map<String, Object> queryBindings);
/**
* <p>queryUpdate.</p>
*
......
......@@ -12,39 +12,30 @@ package fr.ifremer.reefdb.dao.system.extraction;
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
import au.com.bytecode.opencsv.CSVWriter;
import au.com.bytecode.opencsv.ResultSetHelperService;
import fr.ifremer.quadrige3.core.dao.technical.csv.CSVDaoImpl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;
import javax.sql.DataSource;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* Dao for extraction with JDBC access on spring data source
......@@ -87,7 +78,9 @@ public class ReefDbExtractionResultDaoImpl extends CSVDaoImpl implements ReefDbE
this.connectionProperties = connectionProperties;
}
/** {@inheritDoc} */
/**
* {@inheritDoc}
*/
@Override
public Long queryCount(String query, Map<String, Object> queryBindings) {
......@@ -105,135 +98,32 @@ public class ReefDbExtractionResultDaoImpl extends CSVDaoImpl implements ReefDbE
});
}
/** {@inheritDoc} */
/**
* {@inheritDoc}
*/
// Runs a SELECT against the extraction datasource and maps each row with the supplied RowMapper.
@Override
public <T> List<T> query(String sql, Map<String, Object> queryBindings, RowMapper<T> rowMapper) {
// Delegates to the inherited JDBC helper, supplying this DAO's dedicated connection properties.
return query(connectionProperties, sql, queryBindings, rowMapper);
}
/** {@inheritDoc} */
/**
* {@inheritDoc}
*/
// Executes an INSERT/UPDATE/DELETE statement and returns the number of affected rows.
@Override
public int queryUpdate(String query, Map<String, Object> paramMap) {
// Delegates to the inherited JDBC helper, supplying this DAO's dedicated connection properties.
return queryUpdate(connectionProperties, query, paramMap);
}
/** {@inheritDoc} */
/**
* {@inheritDoc}
*/
// Runs the given SELECT and reduces each row to the integer found in its first column.
@Override
public List<Integer> queryIntegerList(String query, Map<String, Object> queryBindings) {
return query(connectionProperties, query, queryBindings, (row, rowNum) -> row.getInt(1));
}
private class CsvResultSetExtractor implements ResultSetExtractor<CSVWriter> {
private static final boolean DEFAULT_TRIM = false;
private static final boolean DEFAULT_CSV_APPLY_QUOTES_TO_ALL = false;
private final CSVWriter writer;
private final boolean showColumnHeaders;
private final CsvResultSetHelperService helperService;
public CsvResultSetExtractor(CSVWriter csvWriter, boolean showColumnHeaders, Map<String, String> fieldNamesByAlias, Map<String, String> fieldAliasFormat, Set<String> floatFieldNames) {
writer = csvWriter;
helperService = new CsvResultSetHelperService(fieldNamesByAlias, fieldAliasFormat, floatFieldNames);
writer.setResultService(helperService);
this.showColumnHeaders = showColumnHeaders;
}
@Override
public CSVWriter extractData(ResultSet rs) throws SQLException {
try {
writer.writeAll(rs, showColumnHeaders, DEFAULT_TRIM, DEFAULT_CSV_APPLY_QUOTES_TO_ALL);
if (log.isDebugEnabled()) {
log.debug(String.format("%s rows written", helperService.getNbRowsWritten()));
}
} catch (IOException e) {
log.error(e.getLocalizedMessage());
}
return writer;
}
}
// opencsv helper that customizes CSV header names and cell formatting for extraction output.
private class CsvResultSetHelperService extends ResultSetHelperService {
// Maps a SQL column label to the header name to export; may be null (no renaming).
private final Map<String, String> fieldNamesByAlias;
// Maps a column name to a custom date/timestamp format pattern; may be null.
private final Map<String, String> fieldAliasFormat;
// Column names whose numeric values must be rendered with float precision; may be null.
private final Set<String> floatFieldNames;
// Incremented once per data row written (header row excluded).
private int nbRowsWritten = 0;
private CsvResultSetHelperService(Map<String, String> fieldNamesByAlias, Map<String, String> fieldAliasFormat, Set<String> floatFieldNames) {
this.fieldNamesByAlias = fieldNamesByAlias;
this.fieldAliasFormat = fieldAliasFormat;
this.floatFieldNames = floatFieldNames;
}
// Number of data rows written so far.
public int getNbRowsWritten() {
return nbRowsWritten;
}
/** Builds the CSV header row, substituting configured aliases for raw column labels. */
@Override
public String[] getColumnNames(ResultSet rs) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
int columnCount = metaData.getColumnCount();
String[] headers = new String[columnCount];
for (int col = 1; col <= columnCount; col++) {
String label = metaData.getColumnLabel(col);
if (fieldNamesByAlias != null && fieldNamesByAlias.containsKey(label)) {
// Replace the raw label by its configured export name.
label = fieldNamesByAlias.get(label);
}
headers[col - 1] = label;
}
return headers;
}
// Counts each data row before delegating, so the extractor can report how many rows were written.
@Override
public String[] getColumnValues(ResultSet rs, boolean trim, String dateFormatString, String timeFormatString) throws SQLException, IOException {
nbRowsWritten++;
return super.getColumnValues(rs, trim, dateFormatString, timeFormatString);
}
/**
 * Renders a decimal column: columns registered in {@code floatFieldNames} are read with
 * float precision, all others are normalized through {@link BigDecimal}; SQL NULL yields "".
 */
@Override
protected String handleBigDecimal(ResultSet rs, int columnIndex) throws SQLException {
// handle float field
if (floatFieldNames != null && floatFieldNames.contains(rs.getMetaData().getColumnName(columnIndex))) {
float floatValue = rs.getFloat(columnIndex);
// FIX: getFloat() returns 0 for SQL NULL, so a NULL cell used to be rendered "0.0";
// check wasNull() and render it empty, consistently with the BigDecimal branch.
if (rs.wasNull()) {
return "";
}
return Float.toString(floatValue);
}
// read result as string and normalize it through BigDecimal
String value = rs.getString(columnIndex);
if (value != null) {
return new BigDecimal(value).toString();
}
return "";
}
/** Formats a date column, preferring a per-column pattern when one is configured. */
@Override
protected String handleDate(ResultSet rs, int columnIndex, String dateFormatString) throws SQLException {
String label = rs.getMetaData().getColumnName(columnIndex);
boolean hasCustomFormat = fieldAliasFormat != null && fieldAliasFormat.containsKey(label);
String format = hasCustomFormat ? fieldAliasFormat.get(label) : dateFormatString;
return super.handleDate(rs, columnIndex, format);
}
/** Formats a timestamp column, preferring a per-column pattern when one is configured. */
@Override
protected String handleTimestamp(ResultSet rs, int columnIndex, String timestampFormatString) throws SQLException {
String label = rs.getMetaData().getColumnName(columnIndex);
if (fieldAliasFormat == null || !fieldAliasFormat.containsKey(label)) {
// No custom pattern for this column: fall back to the default format.
return super.handleTimestamp(rs, columnIndex, timestampFormatString);
}
return super.handleTimestamp(rs, columnIndex, fieldAliasFormat.get(label));
}
// Runs the given SELECT and reduces each row to the string found in its first column.
@Override
public List<String> queryStringList(String query, Map<String, Object> queryBindings) {
return query(connectionProperties, query, queryBindings, (row, rowNum) -> row.getString(1));
}
}
......@@ -47,7 +47,6 @@ import org.hibernate.type.IntegerType;
import org.hibernate.type.StringType;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.Cache;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.stereotype.Repository;
......@@ -136,7 +135,6 @@ public class ReefDbFilterDaoImpl extends FilterDaoImpl implements ReefDbFilterDa
}
Iterator<Object[]> it;
Cache cacheById = cacheService.getCache(FILTER_BY_ID_CACHE);
if (filterTypeId == null) {
it = queryIterator("filtersByContext",
......@@ -154,9 +152,7 @@ public class ReefDbFilterDaoImpl extends FilterDaoImpl implements ReefDbFilterDa
while (it.hasNext()) {
Object[] row = it.next();
FilterDTO filter = toFilterDTO(Arrays.asList(row).iterator());
result.add(filter);
cacheById.put(filter.getId(), filter);
result.add(toFilterDTO(Arrays.asList(row).iterator()));
}
return result;
}
......
......@@ -41,8 +41,10 @@ import fr.ifremer.reefdb.dto.system.extraction.ExtractionDTO;
import fr.ifremer.reefdb.dto.system.extraction.ExtractionPeriodDTO;
import fr.ifremer.reefdb.dto.system.extraction.ExtractionPmfmInfoDTO;
import fr.ifremer.reefdb.service.ReefDbBusinessException;
import fr.ifremer.reefdb.service.ReefDbDataContext;
import fr.ifremer.reefdb.service.ReefDbServiceLocator;
import fr.ifremer.reefdb.service.ReefDbTechnicalException;
import fr.ifremer.reefdb.service.administration.program.ProgramStrategyService;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.map.HashedMap;
import org.apache.commons.lang3.StringUtils;
......@@ -79,6 +81,12 @@ public class ExtractionPerformServiceImpl implements ExtractionPerformService {
@Resource
protected ReefDbConfiguration config;
@Resource(name = "reefdbDataContext")
protected ReefDbDataContext dataContext;
@Resource(name = "reefdbProgramStrategyService")
protected ProgramStrategyService programStrategyService;
@Resource(name = "reefdbExtractionService")
protected ExtractionService extractionService;
......@@ -101,7 +109,7 @@ public class ExtractionPerformServiceImpl implements ExtractionPerformService {
// ensure all filters are loaded
extractionService.loadFilteredElements(extraction);
progressionModel.setMessage("");
progressionModel.setTotal(8);
progressionModel.setTotal(9);
if (LOG.isInfoEnabled()) {
LOG.info(String.format("Beginning a %s extraction (id=%s) with:", outputType, extraction.getId()));
......@@ -175,8 +183,16 @@ public class ExtractionPerformServiceImpl implements ExtractionPerformService {
LOG.debug(String.format("list of pmfm ids to split : %s", ReefDbBeans.collectProperties(context.getPmfmInfos(), ExtractionPmfmInfoDTO.PROPERTY_PMFM_ID)));
}
// Second clean operation: remove surveys from hermetic programs if user is not allowed (Mantis #47660)
int nbRowsRemoved2 = cleanHermeticData(context);
progressionModel.increments(1);
if (LOG.isDebugEnabled() && nbRowsRemoved2 > 0) {
LOG.debug(String.format("%s rows removed from raw data which not corresponding to user rights (hermetic programs)", nbRowsRemoved2));
}
// if all the rows have been removed because of filters, there is no more data to extract
if (nbRowsRemoved == nbRowsInserted) {
if (nbRowsRemoved + nbRowsRemoved2 == nbRowsInserted) {
throw new ReefDbBusinessException(t("reefdb.service.extraction.noPmfm.error"));
}
......@@ -305,6 +321,7 @@ public class ExtractionPerformServiceImpl implements ExtractionPerformService {
queryBuilder.append("DECLARE LOCAL TEMPORARY TABLE ").append(context.getRawTableName()).append(" (");
queryBuilder.append("SURVEY_ID INTEGER, ");
queryBuilder.append("SURVEY_PROG_CD VARCHAR(40), ");
queryBuilder.append("REC_DEP_ID INTEGER, ");
queryBuilder.append("MON_LOC_NM VARCHAR(100), ");
queryBuilder.append("SURVEY_DT TIMESTAMP, ");
queryBuilder.append("SAMPLING_OPER_ID INTEGER, ");
......@@ -579,7 +596,7 @@ public class ExtractionPerformServiceImpl implements ExtractionPerformService {
// Base select
StringBuilder baseSelectBuilder = new StringBuilder();
baseSelectBuilder.append("SELECT BASE.SURVEY_ID, BASE.SURVEY_PROG_CD, BASE.MON_LOC_NM, BASE.SURVEY_DT, BASE.SAMPLING_OPER_ID, BASE.SAMPLING_OPER_LB, ");
baseSelectBuilder.append("SELECT BASE.SURVEY_ID, BASE.SURVEY_PROG_CD, BASE.REC_DEP_ID, BASE.MON_LOC_NM, BASE.SURVEY_DT, BASE.SAMPLING_OPER_ID, BASE.SAMPLING_OPER_LB, ");
baseSelectBuilder.append("BASE.MON_LOC_ID, BASE.MON_LOC_LB, MLOI.ORDER_ITEM_NM, BASE.SURVEY_POSITION_LONG, BASE.SURVEY_POSITION_LAT, ");
baseSelectBuilder.append("BASE.SAMPLING_EQUIPMENT_NM, BASE.SAMPLING_OPER_DEP_NM, ");
if (outputType != ExtractionOutputType.PAMPA) {
......@@ -605,7 +622,7 @@ public class ExtractionPerformServiceImpl implements ExtractionPerformService {
StringBuilder insertQueryBuiler = new StringBuilder();
insertQueryBuiler.append("INSERT INTO ").append(context.getRawTableName()).append(LS);
insertQueryBuiler.append("WITH BASE AS (").append(LS);
insertQueryBuiler.append("SELECT S.SURVEY_ID, PROG.PROG_CD AS SURVEY_PROG_CD, ML.MON_LOC_NM, S.SURVEY_DT,").append(LS);
insertQueryBuiler.append("SELECT S.SURVEY_ID, PROG.PROG_CD AS SURVEY_PROG_CD, S.REC_DEP_ID, ML.MON_LOC_NM, S.SURVEY_DT,").append(LS);
insertQueryBuiler.append("SO.SAMPLING_OPER_ID, SO.SAMPLING_OPER_LB, ML.MON_LOC_ID, ML.MON_LOC_LB,").append(LS);
insertQueryBuiler.append("REGEXP_SUBSTRING(SP.SURVEY_POSITION, '[0-9.-]+') AS SURVEY_POSITION_LONG,").append(LS);
insertQueryBuiler.append("TRIM(TRAILING ')' FROM REGEXP_SUBSTRING(SP.SURVEY_POSITION, '[0-9.-]+\\)$')) AS SURVEY_POSITION_LAT,").append(LS);
......@@ -784,6 +801,36 @@ public class ExtractionPerformServiceImpl implements ExtractionPerformService {
}
/**
 * Removes, from the raw extraction table, rows belonging to hermetic programs the current
 * user may not see (Mantis #47660). A hermetic-program row is kept only when the user
 * manages the program, or when the row was recorded by the user's own department.
 *
 * @param context the current extraction context (provides the raw table name)
 * @return the number of rows deleted
 */
private int cleanHermeticData(ExtractionContextDTO context) {
// 1- collect the distinct hermetic programs actually present in the raw table
String query = "SELECT P.PROG_CD FROM PROGRAMME P WHERE P.IS_DEPARTMENT_HERMETIC = '1' AND P.PROG_CD IN " +
"(SELECT DISTINCT SURVEY_PROG_CD FROM " + context.getRawTableName() + ")";
List<String> hermeticPrograms = extractionResultDao.queryStringList(query, null);
if (CollectionUtils.isEmpty(hermeticPrograms)) {
return 0;
}
// 2- resolve the current user and recorder department; both are mandatory here
int nbRemoves = 0;
Integer userId = dataContext.getRecorderPersonId();
Assert.notNull(userId);
Integer recDepId = dataContext.getRecorderDepartmentId();
Assert.notNull(recDepId);
Set<String> managedProgramCodes = programStrategyService.getManagedProgramCodesByQuserId(userId);
// 3- for each hermetic program the user does not manage, delete rows recorded by other departments
for (String hermeticProgram : hermeticPrograms) {
if (managedProgramCodes == null || !managedProgramCodes.contains(hermeticProgram)) {
// FIX: the program code is interpolated into the SQL literal; escape embedded quotes
// so the statement stays well-formed (codes come from the PROGRAMME table, but
// defensive escaping avoids a malformed/injectable DELETE).
String safeProgram = hermeticProgram.replace("'", "''");
query = "DELETE FROM " + context.getRawTableName() + " WHERE SURVEY_PROG_CD = '" + safeProgram + "' AND REC_DEP_ID != " + recDepId;
nbRemoves += extractionResultDao.queryUpdate(query, null);
}
}
return nbRemoves;
}
@SuppressWarnings("StringBufferReplaceableByString")
private void insertPmfmData(ExtractionContextDTO context, ExtractionOutputType outputType) {
......
......@@ -134,6 +134,11 @@ public class PmfmDaoReadTest extends AbstractDaoTest {
}
}
FractionDTO fraction1 = ReefDbBeans.findById(fractions, 1);
assertNotNull(fraction1);
assertEquals("Aucune", fraction1.getName());
assertEquals(4, fraction1.sizeMatrixes());
FractionDTO fraction = fractionDao.getFractionById(1);
assertNotNull(fraction);
assertEquals("Aucune", fraction.getName());
......
......@@ -4,7 +4,7 @@
<parent>
<groupId>fr.ifremer.reefdb</groupId>
<artifactId>reefdb</artifactId>
<version>3.6.1</version>
<version>3.6.2</version>
</parent>
<artifactId>reefdb-ui-swing</artifactId>
......
......@@ -31,6 +31,12 @@
</properties>
<body>
<release version="3.6.2" date="2019-06-05" description="Stable release">
<action dev="ludovic.pecquot@e-is.pro" type="fix" issue="47660">
Use hermetic programs correctly in extraction
</action>
</release>
<release version="3.6.1" date="2019-05-29" description="Stable release">
<action dev="ludovic.pecquot@e-is.pro" type="fix" issue="47548">
Synchro optimization patch regression fix (in quadrige3-core 3.3.5)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment