Merge remote-tracking branch 'origin/develop' into develop

2024-02-28 12:17:38 +01:00
12 changed files with 590 additions and 201 deletions

View File

@@ -108,6 +108,9 @@
<element id="library" level="project" name="Maven: com.fasterxml.jackson.core:jackson-core:2.9.4" />
<element id="library" level="project" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.9.4" />
<element id="library" level="project" name="Maven: com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.9.4" />
<element id="library" level="project" name="Maven: com.alibaba:fastjson:2.0.47" />
<element id="library" level="project" name="Maven: com.alibaba.fastjson2:fastjson2-extension:2.0.47" />
<element id="library" level="project" name="Maven: com.alibaba.fastjson2:fastjson2:2.0.47" />
<element id="library" level="project" name="Maven: org.springframework:spring-aop:4.2.5.RELEASE" />
<element id="library" level="project" name="Maven: org.springframework:spring-core:4.2.5.RELEASE" />
<element id="library" level="project" name="Maven: org.springframework:spring-instrument-tomcat:4.2.5.RELEASE" />
@@ -254,7 +257,7 @@
<element id="library" level="project" name="Maven: org.reflections:reflections:0.9.10" />
<element id="library" level="project" name="Maven: org.javassist:javassist:3.19.0-GA" />
<element id="library" level="project" name="Maven: com.google.code.findbugs:annotations:2.0.1" />
<element id="library" level="project" name="Maven: com.google.code.gson:gson:2.5" />
<element id="library" level="project" name="Maven: com.google.code.gson:gson:2.9.1" />
<element id="library" level="project" name="Maven: org.apache.httpcomponents:httpclient:4.3.6" />
<element id="library" level="project" name="Maven: org.apache.httpcomponents:httpcore:4.3.3" />
<element id="library" level="project" name="Maven: commons-logging:commons-logging:1.1.3" />
@@ -721,7 +724,7 @@
</component>
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Tomcat " type="#com.intellij.j2ee.web.tomcat.TomcatRunConfigurationFactory" factoryName="Local" APPLICATION_SERVER_NAME="Tomcat" ALTERNATIVE_JRE_ENABLED="true" ALTERNATIVE_JRE_PATH="1.8" show_console_on_std_err="true" show_console_on_std_out="true">
<option name="COMMON_VM_ARGUMENTS" value="-Xms256m -Xmx1024m -XX:PermSize=1024m" />
<option name="COMMON_VM_ARGUMENTS" value="-Xms512m -Xmx2048m" />
<deployment>
<artifact name="ems-engine:war exploded">
<settings>
@@ -867,7 +870,7 @@
<method v="2" />
</configuration>
<configuration default="false" name="Tomcat (FAST)" type="#com.intellij.j2ee.web.tomcat.TomcatRunConfigurationFactory" factoryName="Local" APPLICATION_SERVER_NAME="Tomcat" ALTERNATIVE_JRE_ENABLED="false" ALTERNATIVE_JRE_PATH="1.8" show_console_on_std_err="true" show_console_on_std_out="true">
<option name="COMMON_VM_ARGUMENTS" value="-DDISABLE_DROOLS_COMPILE=TRUE -Xms256m -Xmx1024m -XX:PermSize=1024m" />
<option name="COMMON_VM_ARGUMENTS" value="-DDISABLE_DROOLS_COMPILE=TRUE -Xms512m -Xmx2048m" />
<deployment>
<artifact name="ems-engine:war exploded">
<settings>
@@ -937,7 +940,7 @@
<option name="PROGRAM_PARAMETERS" value="" />
</SHUTDOWN>
</ConfigurationWrapper>
<ConfigurationWrapper VM_VAR="JAVA_OPTS" RunnerId="JProfiler">
<ConfigurationWrapper VM_VAR="JAVA_OPTS" RunnerId="Profile">
<option name="USE_ENV_VARIABLES" value="true" />
<STARTUP>
<option name="USE_DEFAULT" value="true" />
@@ -952,7 +955,7 @@
<option name="PROGRAM_PARAMETERS" value="" />
</SHUTDOWN>
</ConfigurationWrapper>
<ConfigurationWrapper VM_VAR="JAVA_OPTS" RunnerId="Profile">
<ConfigurationWrapper VM_VAR="JAVA_OPTS" RunnerId="Profile ">
<option name="USE_ENV_VARIABLES" value="true" />
<STARTUP>
<option name="USE_DEFAULT" value="true" />
@@ -1117,6 +1120,42 @@
<root url="jar://$MAVEN_REPOSITORY$/c3p0/c3p0/0.9.1.1/c3p0-0.9.1.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.alibaba.fastjson2:fastjson2-extension:2.0.47" type="java-imported" external-system-id="Maven">
<properties groupId="com.alibaba.fastjson2" artifactId="fastjson2-extension" version="2.0.47" />
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson2/fastjson2-extension/2.0.47/fastjson2-extension-2.0.47.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson2/fastjson2-extension/2.0.47/fastjson2-extension-2.0.47-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson2/fastjson2-extension/2.0.47/fastjson2-extension-2.0.47-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.alibaba.fastjson2:fastjson2:2.0.47" type="java-imported" external-system-id="Maven">
<properties groupId="com.alibaba.fastjson2" artifactId="fastjson2" version="2.0.47" />
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson2/fastjson2/2.0.47/fastjson2-2.0.47.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson2/fastjson2/2.0.47/fastjson2-2.0.47-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson2/fastjson2/2.0.47/fastjson2-2.0.47-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.alibaba:fastjson:2.0.47" type="java-imported" external-system-id="Maven">
<properties groupId="com.alibaba" artifactId="fastjson" version="2.0.47" />
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/2.0.47/fastjson-2.0.47.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/2.0.47/fastjson-2.0.47-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/2.0.47/fastjson-2.0.47-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.amazonservices.mws:MWSClientJavaRuntime:1.1" type="java-imported" external-system-id="Maven">
<properties groupId="com.amazonservices.mws" artifactId="MWSClientJavaRuntime" version="1.1" />
<CLASSES>
@@ -1501,16 +1540,16 @@
<root url="jar://$MAVEN_REPOSITORY$/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.google.code.gson:gson:2.5" type="java-imported" external-system-id="Maven">
<properties groupId="com.google.code.gson" artifactId="gson" version="2.5" />
<library name="Maven: com.google.code.gson:gson:2.9.1" type="java-imported" external-system-id="Maven">
<properties groupId="com.google.code.gson" artifactId="gson" version="2.9.1" />
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.5/gson-2.5.jar!/" />
<root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.9.1/gson-2.9.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.5/gson-2.5-javadoc.jar!/" />
<root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.9.1/gson-2.9.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.5/gson-2.5-sources.jar!/" />
<root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.9.1/gson-2.9.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.google.errorprone:error_prone_annotations:2.0.18" type="java-imported" external-system-id="Maven">

View File

@@ -437,6 +437,12 @@
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>2.0.47</version>
</dependency>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
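
Note on the dependency added above: com.alibaba:fastjson at version 2.0.47 appears to be the fastjson v1-compatible facade over fastjson2, which would explain why fastjson2 and fastjson2-extension 2.0.47 also show up in the project library list. A minimal, self-contained usage sketch under that assumption (the map contents are illustrative):

import com.alibaba.fastjson.JSON;

import java.util.HashMap;
import java.util.Map;

public class FastjsonSketch {
    public static void main(String[] args) {
        Map<String, Object> row = new HashMap<>();
        row.put("id", 42L);
        row.put("entityName", "stb_publications");
        // Serialize and parse back through the classic com.alibaba.fastjson.JSON entry point.
        String json = JSON.toJSONString(row);
        Map<?, ?> back = JSON.parseObject(json, Map.class);
        System.out.println(json + " -> id=" + back.get("id"));
    }
}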

View File

@@ -37,17 +37,10 @@ public class BooleanDeserializer extends JsonDeserializer<Boolean> {
return Boolean.FALSE;
}
// if ("N".equalsIgnoreCase(text) || text.length() == 0) {
// return Boolean.FALSE;
// }
//
// if ("Y".equalsIgnoreCase(text)) {
// return Boolean.TRUE;
// }
throw ctxt.weirdStringException("", _valueClass, "only \"true\" or \"false\" recognized");
}
// Otherwise, no can do:
throw ctxt.mappingException(_valueClass);
throw ctxt.instantiationException(_valueClass, "error");
}
}
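
For context on the exception change above: DeserializationContext.mappingException(Class) is deprecated in the Jackson 2.x line used here, so routing the fallback through instantiationException (or one of the handleXxx/reportXxx helpers) is the usual replacement. A minimal sketch of a lenient Boolean deserializer in the same spirit; the class name and the "Y"/"N" handling (taken from the commented-out block) are illustrative, not the project's actual behaviour:

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;

import java.io.IOException;

public class LenientBooleanDeserializer extends JsonDeserializer<Boolean> {
    @Override
    public Boolean deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
        String text = p.getValueAsString();
        if (text != null) {
            text = text.trim();
            if ("true".equalsIgnoreCase(text) || "Y".equalsIgnoreCase(text)) {
                return Boolean.TRUE;
            }
            if ("false".equalsIgnoreCase(text) || "N".equalsIgnoreCase(text) || text.isEmpty()) {
                return Boolean.FALSE;
            }
            // Unknown string: report it through the context rather than guessing a default.
            throw ctxt.weirdStringException(text, Boolean.class, "only \"true\"/\"false\" (or \"Y\"/\"N\") recognized");
        }
        // Not a textual token at all (object, array, ...): let Jackson report it.
        return (Boolean) ctxt.handleUnexpectedToken(Boolean.class, p);
    }
}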

View File

@@ -1,19 +1,25 @@
package it.integry.ems.sync;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.microsoft.sqlserver.jdbc.SQLServerPreparedStatement;
import it.integry.annotations.PostContextConstruct;
import it.integry.ems.datasource.DataSource;
import it.integry.ems.json.JSONObjectMapper;
import it.integry.ems.looper.service.LooperService;
import it.integry.ems.sync.MultiDBTransaction.MultiDBTransactionManager;
import it.integry.ems.sync.dto.ExportHistoryGroupDTO;
import it.integry.ems.sync.dto.ExportHistoryItemDTO;
import it.integry.ems.sync.dto.ExportHistoryStatusDTO;
import it.integry.ems.sync.dto.PublicationDTO;
import it.integry.ems_model.base.EntityBase;
import it.integry.ems_model.base.EntityPropertyHolder;
import it.integry.ems_model.entity.StbPublications;
import it.integry.ems_model.entity.StbPublicationsDetail;
import it.integry.ems_model.entity.StbTransactionLog;
import it.integry.ems_model.types.OperationType;
import it.integry.ems_model.utility.UtilityDB;
import it.integry.ems_model.utility.UtilityLocalDate;
import javafx.util.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
@@ -22,7 +28,10 @@ import org.springframework.stereotype.Component;
import java.sql.Connection;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
@Component
public class AsyncHistoryManager {
@@ -39,27 +48,36 @@ public class AsyncHistoryManager {
private JSONObjectMapper jsonObjectMapper;
//Implement check list like distribuzione
private final HashMap<Long, List<ExportHistoryItemDTO>> currentlyInExecution = new HashMap<>();
private final AtomicReference<List<ExportHistoryGroupDTO>> currentlyInExecutionG = new AtomicReference<>(new ArrayList<>());
@PostContextConstruct
public void init() {
this.looperService.add(this::consumeRetrieveQueue, 10 * 1000, "async-history-read");
this.looperService.add(this::consumeProcessedQueue, 10 * 1000, "async-history-save");
this.looperService.add(this::consumeProcessedQueue, 2 * 1000, "async-history-save");
}
public void addToExportQueue(DataSource dataSource, long groupId, PublicationDTO publication) throws Exception {
tryAddInExecutionList(groupId, new ExportHistoryItemDTO()
.setPublicationGroupId(groupId)
.setPublication(publication)
.setDataSource(dataSource));
tryAddInExecutionList(groupId, dataSource, new ExportHistoryItemDTO()
.setPublication(publication));
}
private void tryAddInExecutionList(long groupId, ExportHistoryItemDTO exportHistoryItemDTO) throws Exception {
currentlyInExecution.putIfAbsent(groupId, new ArrayList<>());
private void tryAddInExecutionList(long groupId, DataSource dataSource, ExportHistoryItemDTO exportHistoryItemDTO) throws Exception {
List<ExportHistoryItemDTO> list = currentlyInExecution.get(groupId);
ExportHistoryGroupDTO currentlyInExec = currentlyInExecutionG.get().stream()
.filter(x -> x.getGroupId() == groupId)
.findFirst().orElseGet(() ->
{
final ExportHistoryGroupDTO exportHistoryGroupDTO = new ExportHistoryGroupDTO()
.setDataSource(dataSource)
.setGroupId(groupId);
currentlyInExecutionG.get().add(exportHistoryGroupDTO);
return exportHistoryGroupDTO;
});
List<ExportHistoryItemDTO> list = currentlyInExec.getItems();
final boolean alreadyRegistered = list.stream()
.anyMatch(x -> x.getPublication().getId() == exportHistoryItemDTO.getPublication().getId());
@@ -67,164 +85,181 @@ public class AsyncHistoryManager {
throw new Exception("Il sistema sta già elaborando questa publication");
}
exportHistoryItemDTO.setTotalItemCount(countExistingItems(exportHistoryItemDTO));
currentlyInExec.setTotalItemCount(currentlyInExec.getTotalItemCount() + countExistingItems(dataSource, exportHistoryItemDTO));
list.add(exportHistoryItemDTO);
}
private void consumeRetrieveQueue() {
final List<Long> currentlyInExecutionKeys = new ArrayList<>(currentlyInExecution.keySet());
for (long currentGroupId : currentlyInExecutionKeys) {
List<ExportHistoryItemDTO> entitiesToExport = currentlyInExecution.get(currentGroupId);
for (ExportHistoryGroupDTO currentGroup : currentlyInExecutionG.get()) {
List<ExportHistoryItemDTO> entitiesToExport = currentGroup.getItems();
if (currentGroup.getStartTime() != null)
continue;
currentGroup.setStartTime(UtilityLocalDate.getNowTime());
for (ExportHistoryItemDTO entityToExport : entitiesToExport) {
try {
internalExportEntity(entityToExport);
internalExportEntity(currentGroup, entityToExport);
} catch (Exception ex) {
ex.printStackTrace();
logger.error(ex.getMessage(), ex);
}
}
currentGroup.setCompletedRead(true);
}
}
private void internalExportEntity(ExportHistoryItemDTO entityHistoryToExport) throws Exception {
private void internalExportEntity(ExportHistoryGroupDTO exportHistoryGroup, ExportHistoryItemDTO entityHistoryToExport) throws Exception {
if (entityHistoryToExport.getStartDate() == null)
entityHistoryToExport.setStartDate(UtilityLocalDate.getNowTime());
final Class<? extends EntityBase> entityClass = entityPropertyHolder.getEntityClassFromTableName(entityHistoryToExport.getPublication().getEntityName());
try (MultiDBTransactionManager newConnection = new MultiDBTransactionManager(entityHistoryToExport.getDataSource().getProfile())) {
retrieveEntitiesByChunk(newConnection.getPrimaryConnection(), entityHistoryToExport, entityClass);
try (MultiDBTransactionManager newConnection = new MultiDBTransactionManager(exportHistoryGroup.getDataSource().getProfile())) {
retrieveEntitiesByChunk(newConnection.getPrimaryConnection(), exportHistoryGroup, entityHistoryToExport, entityClass);
}
}
private long countExistingItems(ExportHistoryItemDTO entityHistoryToExport) throws Exception {
private long countExistingItems(DataSource dataSource, ExportHistoryItemDTO entityHistoryToExport) throws Exception {
final Class<? extends EntityBase> entityClass = entityPropertyHolder.getEntityClassFromTableName(entityHistoryToExport.getPublication().getEntityName());
try (MultiDBTransactionManager newConnection = new MultiDBTransactionManager(entityHistoryToExport.getDataSource().getProfile())) {
try (MultiDBTransactionManager newConnection = new MultiDBTransactionManager(dataSource.getProfile())) {
String sql = "SELECT CONVERT(BIGINT, COUNT(*)) FROM " + entityClass.newInstance().getTableName();
return UtilityDB.executeSimpleQueryOnlyFirstRowFirstColumn(newConnection.getPrimaryConnection(), sql);
}
}
private void retrieveEntitiesByChunk(Connection connection, ExportHistoryItemDTO exportHistoryItem, Class<? extends EntityBase> entityClass) throws Exception {
private void retrieveEntitiesByChunk(Connection connection, ExportHistoryGroupDTO exportHistoryGroup, ExportHistoryItemDTO exportHistoryItem, Class<? extends EntityBase> entityClass) throws Exception {
long totalItemCount = exportHistoryItem.getTotalItemCount();
int chunkSize = 10000;
String innerSql = "SELECT * " +
"FROM " + exportHistoryItem.getPublication().getEntityName();
innerSql = UtilityDB.addwhereCond(innerSql, exportHistoryItem.getPublication().getWhereCond(), true);
for (int chunkIndex = 0; chunkIndex < (totalItemCount / chunkSize) + 1; chunkIndex++) {
UtilityDB.executeSimpleQueryDTOAsync(connection, innerSql, entityClass, data -> {
while (exportHistoryItem.getToProcessQueue().size() > 30000) {
Thread.sleep(10 * 1000);
exportHistoryGroup.setProcessedItemCount(exportHistoryGroup.getProcessedItemCount() + 1);
// exportHistoryItem.getToProcessQueue().add(JSON.toJSONString(data));
try {
exportHistoryGroup.getToProcessQueue().add(new Pair<>(exportHistoryItem.getPublication().getEntityName(), jsonObjectMapper.writeValueAsString(data)));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
String innerSql = "SELECT *, ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) AS row_number " +
"FROM " + exportHistoryItem.getPublication().getEntityName();
innerSql = UtilityDB.addwhereCond(innerSql, exportHistoryItem.getPublication().getWhereCond(), true);
String sql = "WITH TempResult AS (\n" +
innerSql + ")\n" +
"SELECT TempResult.*\n" +
"FROM TempResult\n" +
"ORDER BY row_number\n" +
"OFFSET (" + chunkIndex + ") * " + chunkSize + " ROWS FETCH NEXT " + chunkSize + " ROWS ONLY";
final List<? extends EntityBase> selectedChunk = UtilityDB.executeSimpleQueryDTO(connection, sql, entityClass);
if (selectedChunk == null || selectedChunk.isEmpty()) continue;
exportHistoryItem.setProcessedItemCount(exportHistoryItem.getProcessedItemCount() + selectedChunk.size());
//logger.debug("COUNT: " + exportHistoryItem.getProcessedItemCount());
selectedChunk.forEach(x -> exportHistoryItem.getToProcessQueue().add(x));
}
while ((float) (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / Runtime.getRuntime().totalMemory() > 0.9f) {
logger.debug("Memory limit reached: " + (((float) (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / Runtime.getRuntime().totalMemory()) * 100) + "%");
try {
Thread.sleep(1 * 1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
});
}
private void consumeProcessedQueue() {
final List<Long> currentlyInExecutionKeys = new ArrayList<>(currentlyInExecution.keySet());
List<ExportHistoryGroupDTO> list = currentlyInExecutionG.get();
for (int i = 0; i < list.size(); i++) {
ExportHistoryGroupDTO currentGroup = list.get(i);
try {
internalProcessQueue(currentGroup);
for (long currentGroupId : currentlyInExecutionKeys) {
List<ExportHistoryItemDTO> entitiesToExport = currentlyInExecution.get(currentGroupId);
if (currentGroup.isCompletedRead() && currentGroup.getSyncronizedItemCount() >= currentGroup.getTotalItemCount() && currentGroup.getEndTime() == null) {
currentGroup.setEndTime(UtilityLocalDate.getNowTime());
updateReadyToTransmit(currentGroup);
list.remove(i--);
}
for (ExportHistoryItemDTO entityToExport : entitiesToExport) {
try {
internalProcessQueue(entityToExport);
} catch (Exception ex) {
logger.error(ex.getMessage(), ex);
currentGroup.getDataSource().getConnection().commit();
} catch (Exception ex) {
ex.printStackTrace();
logger.error(ex.getMessage(), ex);
}
}
}
private void internalProcessQueue(ExportHistoryGroupDTO currentGroup) throws Exception {
String insertSQL = "INSERT INTO " + StbTransactionLog.ENTITY + " (publication_group_id, created_at, user_name, entities, entities_json, group_id)" +
" VALUES (?, ?, ?, ?, ?, ?)";
try (MultiDBTransactionManager multiDBTransactionManager = new MultiDBTransactionManager(currentGroup.getDataSource().getProfile())) {
while (!currentGroup.getToProcessQueue().isEmpty()) {
try (SQLServerPreparedStatement insertBulkPs = (SQLServerPreparedStatement) multiDBTransactionManager.prepareStatement(insertSQL)) {
Pair<String, String> entityJson;
while ((entityJson = currentGroup.getToProcessQueue().poll()) != null) {
insertBulkPs.setLong(1, currentGroup.getGroupId());
insertBulkPs.setObject(2, UtilityLocalDate.getNowTime());
insertBulkPs.setString(3, null);
insertBulkPs.setString(4, entityJson.getKey());
insertBulkPs.setString(5, entityJson.getValue());
insertBulkPs.setLong(6, -1);
insertBulkPs.addBatch();
currentGroup.setSyncronizedItemCount(currentGroup.getSyncronizedItemCount() + 1);
}
insertBulkPs.executeLargeBatch();
System.gc();
}
}
}
}
private void internalProcessQueue(ExportHistoryItemDTO entityToExport) throws Exception {
//logger.debug("TBS COUNT: " + entityToExport.getToProcessQueue().size());
if (entityToExport.getStartDate() == null) entityToExport.setStartDate(UtilityLocalDate.getNowTime());
private void updateReadyToTransmit(ExportHistoryGroupDTO exportHistoryGroup) throws Exception {
StbPublications stbPublications = new StbPublications()
.setId(exportHistoryGroup.getGroupId());
stbPublications.setOperation(OperationType.NO_OP);
try (MultiDBTransactionManager multiDBTransactionManager = new MultiDBTransactionManager(entityToExport.getDataSource().getProfile())) {
stbPublications.setStbPublicationsDetails(exportHistoryGroup.getItems().stream()
.map(x -> {
StbPublicationsDetail stbPublicationsDetail = new StbPublicationsDetail()
.setId(x.getPublication().getId())
.setReadyToTransmit(true);
EntityBase entity;
while ((entity = entityToExport.getToProcessQueue().poll()) != null) {
stbPublicationsDetail.setOperation(OperationType.UPDATE);
return stbPublicationsDetail;
})
.collect(Collectors.toList()));
StbTransactionLog stbTransactionLog = new StbTransactionLog()
.setPublicationGroupId(entityToExport.getPublicationGroupId())
.setCreatedAt(UtilityLocalDate.getNowTime())
.setEntitiesJson(jsonObjectMapper.writeValueAsString(entity))
.setGroupId(-1)
.setEntities(entityToExport.getPublication().getEntityName());
stbTransactionLog
.setOperation(OperationType.INSERT);
stbTransactionLog.manageWithParentConnection(multiDBTransactionManager.getPrimaryConnection());
entityToExport.setSyncronizedItemCount(entityToExport.getSyncronizedItemCount() + 1);
}
}
stbPublications.manageWithParentConnection(exportHistoryGroup.getDataSource().getConnection());
}
public List<ExportHistoryStatusDTO> getStatus() {
List<ExportHistoryStatusDTO> statusList = new ArrayList<>();
HashMap<Long, HashMap<String, Object>> total = new HashMap<>();
for (ExportHistoryGroupDTO currentGroup : currentlyInExecutionG.get()) {
for (Long key : currentlyInExecution.keySet()) {
final LocalDateTime startDate = currentlyInExecution.get(key).stream()
.map(ExportHistoryItemDTO::getStartDate)
.filter(Objects::nonNull)
.min(Comparator.naturalOrder())
.orElse(null);
final LocalDateTime startDate = currentGroup.getStartTime();
long totalCount = 0;
long processedCount = 0;
long itemsPerSec = 1;
LocalDateTime estimatedEnd = null;
LocalDateTime estimatedEnd = currentGroup.getEndTime();
if (startDate != null) {
totalCount = currentlyInExecution.get(key).stream()
.map(ExportHistoryItemDTO::getTotalItemCount)
.reduce(0L, Long::sum);
totalCount = currentGroup.getTotalItemCount();
processedCount = currentGroup.getSyncronizedItemCount();
processedCount = currentlyInExecution.get(key).stream()
.map(ExportHistoryItemDTO::getSyncronizedItemCount)
.reduce(0L, Long::sum);
final long secondsBetween = ChronoUnit.SECONDS.between(startDate, UtilityLocalDate.getNowTime());
final long secondsBetween = ChronoUnit.SECONDS.between(startDate, estimatedEnd != null ? estimatedEnd : UtilityLocalDate.getNowTime());
itemsPerSec = processedCount / (secondsBetween == 0 ? 1 : secondsBetween);
//if(itemsPerSec == 0) itemsPerSec = 1 / 60;
double secsToEnd = (double) totalCount / (itemsPerSec == 0 ? 1 : itemsPerSec);
estimatedEnd = startDate.plusSeconds((long) secsToEnd);
}
statusList.add(new ExportHistoryStatusDTO()
.setPublicationGroupId(key)
.setPublicationGroupId(currentGroup.getGroupId())
.setStartedAt(startDate)
.setTotalCount(totalCount)
.setProcessedCount(processedCount)
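
The reworked read path above streams rows through a callback and throttles the producer whenever the in-flight queue grows too large or heap usage climbs; the 30 000-item and 90% thresholds come from the code shown. A stand-alone sketch of that back-pressure idea (class and method names are illustrative):

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class BackPressureSketch {
    // Block the producing thread until the consumer has drained the queue below
    // the size threshold and heap usage has dropped under the given ratio.
    static void awaitCapacity(Queue<?> queue) throws InterruptedException {
        Runtime rt = Runtime.getRuntime();
        while (queue.size() > 30_000
                || (double) (rt.totalMemory() - rt.freeMemory()) / rt.totalMemory() > 0.9) {
            Thread.sleep(1_000);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        Queue<String> toProcess = new ConcurrentLinkedQueue<>();
        awaitCapacity(toProcess);        // returns immediately while the queue is small and memory is free
        toProcess.add("serialized-row"); // producer side; a consumer would poll() on another thread
    }
}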

View File

@@ -1,11 +1,27 @@
package it.integry.ems.sync.dto;
import it.integry.ems.datasource.DataSource;
import javafx.util.Pair;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
public class ExportHistoryGroupDTO {
private DataSource dataSource;
private long groupId;
private List<ExportHistoryItemDTO> items;
private LocalDateTime startTime;
private LocalDateTime endTime;
private boolean completedRead;
private long totalItemCount;
private long processedItemCount;
private long syncronizedItemCount;
private List<ExportHistoryItemDTO> items = new ArrayList<>();
private final Queue<Pair<String, String>> toProcessQueue = new ConcurrentLinkedQueue<>();
public long getGroupId() {
@@ -25,4 +41,72 @@ public class ExportHistoryGroupDTO {
this.items = items;
return this;
}
public Queue<Pair<String, String>> getToProcessQueue() {
return toProcessQueue;
}
public LocalDateTime getStartTime() {
return startTime;
}
public ExportHistoryGroupDTO setStartTime(LocalDateTime startTime) {
this.startTime = startTime;
return this;
}
public DataSource getDataSource() {
return dataSource;
}
public ExportHistoryGroupDTO setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
return this;
}
public long getTotalItemCount() {
return totalItemCount;
}
public ExportHistoryGroupDTO setTotalItemCount(long totalItemCount) {
this.totalItemCount = totalItemCount;
return this;
}
public long getProcessedItemCount() {
return processedItemCount;
}
public ExportHistoryGroupDTO setProcessedItemCount(long processedItemCount) {
this.processedItemCount = processedItemCount;
return this;
}
public long getSyncronizedItemCount() {
return syncronizedItemCount;
}
public ExportHistoryGroupDTO setSyncronizedItemCount(long syncronizedItemCount) {
this.syncronizedItemCount = syncronizedItemCount;
return this;
}
public boolean isCompletedRead() {
return completedRead;
}
public ExportHistoryGroupDTO setCompletedRead(boolean completedRead) {
this.completedRead = completedRead;
return this;
}
public LocalDateTime getEndTime() {
return endTime;
}
public ExportHistoryGroupDTO setEndTime(LocalDateTime endTime) {
this.endTime = endTime;
return this;
}
}
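
One practical note on the queue type above: javafx.util.Pair is only present on JDKs that bundle JavaFX (such as the Oracle JDK 8 targeted by the run configurations in this commit), so a JDK-only equivalent would be java.util.AbstractMap.SimpleEntry, which AsyncResultSetMapper in this same commit already uses. A tiny sketch of that alternative (values are illustrative):

import java.util.AbstractMap;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class PairQueueSketch {
    public static void main(String[] args) {
        Queue<Map.Entry<String, String>> toProcess = new ConcurrentLinkedQueue<>();
        toProcess.add(new AbstractMap.SimpleEntry<>("stb_publications", "{\"id\":42}"));
        Map.Entry<String, String> next = toProcess.poll();
        System.out.println(next.getKey() + " -> " + next.getValue()); // entity name -> serialized row
    }
}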

View File

@@ -1,41 +1,12 @@
package it.integry.ems.sync.dto;
import it.integry.ems.datasource.DataSource;
import it.integry.ems_model.base.EntityBase;
import java.time.LocalDateTime;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
public class ExportHistoryItemDTO {
private long publicationGroupId;
private DataSource dataSource;
private PublicationDTO publication;
private LocalDateTime startDate;
private long totalItemCount;
private long processedItemCount;
private long syncronizedItemCount;
private Queue<EntityBase> toProcessQueue = new ConcurrentLinkedQueue<EntityBase>();
public long getPublicationGroupId() {
return publicationGroupId;
}
public ExportHistoryItemDTO setPublicationGroupId(long publicationGroupId) {
this.publicationGroupId = publicationGroupId;
return this;
}
public DataSource getDataSource() {
return dataSource;
}
public ExportHistoryItemDTO setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
return this;
}
public PublicationDTO getPublication() {
return publication;
@@ -54,40 +25,4 @@ public class ExportHistoryItemDTO {
this.startDate = startDate;
return this;
}
public long getTotalItemCount() {
return totalItemCount;
}
public ExportHistoryItemDTO setTotalItemCount(long totalItemCount) {
this.totalItemCount = totalItemCount;
return this;
}
public long getProcessedItemCount() {
return processedItemCount;
}
public ExportHistoryItemDTO setProcessedItemCount(long processedItemCount) {
this.processedItemCount = processedItemCount;
return this;
}
public Queue<EntityBase> getToProcessQueue() {
return toProcessQueue;
}
public ExportHistoryItemDTO setToProcessQueue(Queue<EntityBase> toProcessQueue) {
this.toProcessQueue = toProcessQueue;
return this;
}
public long getSyncronizedItemCount() {
return syncronizedItemCount;
}
public ExportHistoryItemDTO setSyncronizedItemCount(long syncronizedItemCount) {
this.syncronizedItemCount = syncronizedItemCount;
return this;
}
}

View File

@@ -5,11 +5,10 @@ import it.integry.ems.sync.AsyncHistoryManager;
import it.integry.ems.sync.MultiDBTransaction.MultiDBTransactionManager;
import it.integry.ems.sync.dto.ExportHistoryStatusDTO;
import it.integry.ems.sync.dto.PublicationDTO;
import it.integry.ems.utility.UtilityEntity;
import it.integry.ems_model.base.EntityBase;
import it.integry.ems_model.base.EntityPropertyHolder;
import it.integry.ems_model.entity.StbPublicationsDetail;
import it.integry.ems_model.utility.Query;
import it.integry.ems_model.utility.UtilityDB;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
@@ -37,11 +36,12 @@ public class RemoteSynchronizationService {
private EntityProcessor entityProcessor;
public void startPublication(long groupId) throws Exception {
StbPublicationsDetail tmpStbPublicationsDetail = new StbPublicationsDetail();
String whereCond = Query.format("stb_publication_id = %s AND export_history = %s", groupId, 1);
String sql = Query.format("SELECT * FROM " + StbPublicationsDetail.ENTITY + " WHERE stb_publication_id = %s AND export_history = %s", groupId, 1);
final List<? extends EntityBase> select = tmpStbPublicationsDetail.select(multiDBTransactionManager.getPrimaryConnection(), whereCond);
final List<StbPublicationsDetail> stbPublicationsDetails = UtilityEntity.toCustomEntity(select);
final List<StbPublicationsDetail> stbPublicationsDetails = UtilityDB.executeSimpleQueryDTO(multiDBTransactionManager.getPrimaryConnection(), sql, StbPublicationsDetail.class);
if(stbPublicationsDetails == null)
throw new Exception("Non è stata trovata alcuna pubblicazione da poter avviare (ID: " + groupId + ")");
for (StbPublicationsDetail stbPublicationsDetail : stbPublicationsDetails) {
asyncHistoryManager.addToExportQueue(multiDBTransactionManager.getPrimaryDatasource(),

View File

@@ -79,7 +79,7 @@ public class RemoteSynchronizationSetupService {
.collect(toList());
publicationGroupDTO.setPublications(publications);
boolean atLeastOneActiveToExport = publications.stream().anyMatch(publicationDTO -> publicationDTO.getExportHistory() && publicationDTO.getActive());
boolean atLeastOneActiveToExport = publications.stream().anyMatch(publicationDTO -> publicationDTO.getExportHistory() && publicationDTO.getActive() && !publicationDTO.getReadyToTransmit());
boolean alreadyExporting = currentExportStatus.stream().anyMatch(x -> x.getPublicationGroupId() == publicationGroupDTO.getId());
publicationGroupDTO.setCanStartExport(atLeastOneActiveToExport && !alreadyExporting);
@@ -113,7 +113,7 @@ public class RemoteSynchronizationSetupService {
.setActive(publicationToInsert.getActive())
.setWhereCondField(publicationToInsert.getWhereCond())
.setExportHistory(publicationToInsert.getExportHistory())
.setReadyToTransmit(!publicationToInsert.getReadyToTransmit());
.setReadyToTransmit(!publicationToInsert.getExportHistory());
stbPublicationsDetail.setOperation(OperationType.INSERT);
StbPublications stbPublications = new StbPublications()
@@ -190,7 +190,7 @@ public class RemoteSynchronizationSetupService {
stbPublicationsDetail = entityProcessor.processEntity(stbPublicationsDetail, multiDBTransactionManager);
if(publicationDTO.getExportHistory() != null)
if(publicationDTO.getExportHistory() != null && !stbPublicationsDetail.isReadyToTransmit())
stbPublicationsDetail.setExportHistory(publicationDTO.getExportHistory());
if(publicationDTO.getActive() != null)

View File

@@ -0,0 +1,282 @@
package it.integry.ems_model.db;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.sqlserver.jdbc.SQLServerResultSet;
import it.integry.ems.expansion.RunnableArgs;
import it.integry.ems.expansion.RunnableArgsWithReturn;
import it.integry.ems.utility.UtilityDebug;
import it.integry.ems_model.ImportExport.Interfaces.IExportFormat;
import it.integry.ems_model.ImportExport.Interfaces.IImportFormat;
import it.integry.ems_model.annotation.Clob;
import it.integry.ems_model.annotation.MapToTable;
import it.integry.ems_model.annotation.SqlField;
import it.integry.ems_model.base.EntityBase;
import it.integry.ems_model.exception.DataConverterNotFoundException;
import it.integry.ems_model.resolver.SqlFieldHolder;
import it.integry.ems_model.types.OperationType;
import it.integry.ems_model.utility.UtilityDB;
import it.integry.ems_model.utility.UtilityString;
import org.apache.commons.codec.binary.Base64;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import javax.validation.constraints.NotNull;
import java.lang.reflect.Field;
import java.lang.reflect.Type;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.*;
public class AsyncResultSetMapper {
private final Logger logger = LogManager.getLogger();
@NotNull
public static List<HashMap<String, Object>> mapResultSetToHashMap(ResultSet rs) throws SQLException {
ResultSetMetaData md = rs.getMetaData();
int columns = md.getColumnCount();
List<HashMap<String, Object>> list = new ArrayList<HashMap<String, Object>>();
while (rs.next()) {
HashMap<String, Object> row = new HashMap<String, Object>(columns);
for (int i = 1; i <= columns; ++i) {
row.put(md.getColumnName(i), rs.getObject(i));
}
list.add(row);
}
return list;
}
public static Map.Entry<String[], List<Object[]>> mapResultSetToArray(ResultSet rs) throws SQLException {
ResultSetMetaData md = rs.getMetaData();
int columns = md.getColumnCount();
String[] columnNames = new String[columns];
for (int i = 1; i <= columns; ++i) {
columnNames[i - 1] = md.getColumnName(i).toLowerCase();
}
List<Object[]> rowsValues = new ArrayList<>();
while (rs.next()) {
Object[] singleRowValues = new Object[columns];
for (int i = 1; i <= columns; ++i) {
singleRowValues[i - 1] = rs.getObject(i);
}
rowsValues.add(singleRowValues);
}
rs.close();
return new AbstractMap.SimpleEntry<>(columnNames, rowsValues);
}
public <T> void mapResultSetToList(SQLServerResultSet rs, Class<T> outputClass, RunnableArgs<T> onElementRetrieved) throws SQLException, DataConverterNotFoundException, InstantiationException, IllegalAccessException {
mapResultSetToList(rs, outputClass, null, true, onElementRetrieved);
}
public <T> void mapResultSetToList(SQLServerResultSet rs, Class<T> outputClass, OperationType operation, RunnableArgs<T> onElementRetrieved)
throws SQLException, DataConverterNotFoundException, InstantiationException, IllegalAccessException {
mapResultSetToList(rs, outputClass, operation, true, onElementRetrieved);
}
public <T> void mapResultSetToList(SQLServerResultSet rs, Class<T> outputClass, OperationType operation, Boolean onlyPKMaster, RunnableArgs<T> onElementRetrieved) throws SQLException, DataConverterNotFoundException, InstantiationException, IllegalAccessException {
if (rs != null) {
Field[] fields;
if (IExportFormat.class.isAssignableFrom(outputClass) || IImportFormat.class.isAssignableFrom(outputClass)) {
fields = outputClass.getSuperclass().getDeclaredFields();
} else {
fields = outputClass.getDeclaredFields();
}
internalMappingList(rs, outputClass, fields, onElementRetrieved);
// if (outputList != null && !outputList.isEmpty() && EntityBase.class.isAssignableFrom(outputClass)) {
// for (T bean : outputList) {
// if (operation != null) {
// ((EntityBase) bean).setOperation(operation);
// }
//
// if (onlyPKMaster != null) {
// ((EntityBase) bean).setOnlyPkMaster(onlyPKMaster);
// }
// }
// }
}
// return outputList != null && !outputList.isEmpty() ? outputList : null;
}
private <T> void internalMappingList(SQLServerResultSet resultSet, Class<T> outputClass, Field[] fields, RunnableArgs<T> onElementRetrieved) throws SQLException, DataConverterNotFoundException, InstantiationException, IllegalAccessException {
final HashMap<Integer, RunnableArgsWithReturn<Object, Object>> columnToConverterMap = new HashMap<>();
final HashMap<Integer, Field> columnToFieldMap = new HashMap<>();
final ResultSetMetaData metaData = resultSet.getMetaData();
for (int i = 0; i < metaData.getColumnCount(); i++) {
String columnName = metaData.getColumnName(i + 1).toLowerCase();
final Field field = Arrays.stream(fields)
.filter(x -> x.isAnnotationPresent(SqlField.class) && x.getAnnotation(SqlField.class).value().equalsIgnoreCase(columnName))
.findFirst().orElse(null);
if (field == null)
continue;
field.setAccessible(true);
final int columnType = metaData.getColumnType(i + 1);
final RunnableArgsWithReturn<Object, Object> converter = SqlFieldHolder.getConverter(UtilityDB.sqlTypeToJavaClass(columnType), field.getType());
columnToConverterMap.put(i + 1, converter);
columnToFieldMap.put(i + 1, field);
}
while (resultSet.next()) {
final T outputObject = outputClass.newInstance();
for (Integer columnIndex : columnToConverterMap.keySet()) {
Object obj = resultSet.getObject(columnIndex);
Object convertedObject = columnToConverterMap.get(columnIndex).run(obj);
columnToFieldMap.get(columnIndex).set(outputObject, convertedObject);
}
onElementRetrieved.run(outputObject);
}
//
// final Map.Entry<String[], List<Object[]>> listPair = mapResultSetToArray(resultSet);
//
// List<String> columnNames = Arrays.asList(listPair.getKey());
// List<Object[]> columnValuesList = listPair.getValue();
//
// if (columnValuesList == null || columnValuesList.isEmpty())
// return null;
//
// List<Field> fieldsWithAnnotations = Stream.of(fields)
// .filter(x -> x.isAnnotationPresent(SqlField.class) || x.isAnnotationPresent(JsonProperty.class) || x.isAnnotationPresent(MapToTable.class))
// .toList();
//
// if (fieldsWithAnnotations.isEmpty()) {
// System.out.println("Attenzione nella classe non sono presenti elementi con le seguenti annotazioni @SqlField, @JsonProperty, @MapToTable. Nessun campo sarà mappato nella classe");
// }
//
//
// final List<ResultSetMappingField> resultSetMappingFields = scanResultSetFields(fields, columnNames, columnTypesFromResultSet);
// final HashMap<Field, List<ResultSetMappingField>> entityChildsResultSetMappingField = new HashMap<>();
//
// //Searching entities
// List<Field> entityFields = fieldsWithAnnotations.stream()
// .filter(x -> EntityBase.class.isAssignableFrom(x.getType()))
// .collect(Collectors.toList());
//
// for (Field entityField : entityFields) {
// Field[] entityChildFields = entityField.getType().getDeclaredFields();
// entityChildsResultSetMappingField.put(entityField, scanResultSetFields(entityChildFields, columnNames, columnTypesFromResultSet));
// }
//
//
//
// for (int i = 0; i < listPair.getValue().size(); i++) {
// T bean = outputClass.newInstance();
// Object[] columnValues = columnValuesList.get(i);
//
// for (ResultSetMappingField resultSetMappingField : resultSetMappingFields) {
// Object columnValue = columnValues[resultSetMappingField.getColumnIndex()];
//
// if (resultSetMappingField.getConverter() != null && columnValue != null) {
// columnValue = resultSetMappingField.getConverter().run(columnValue);
// resultSetMappingField.getField().set(bean, columnValue);
// }
// }
//
// for (Field entityChildResultSetMappingField : entityChildsResultSetMappingField.keySet()) {
// EntityBase entityChildBean = (EntityBase) entityChildResultSetMappingField.getType().newInstance();
// entityChildBean.setOperation(OperationType.SELECT_OBJECT);
// entityChildResultSetMappingField.set(bean, entityChildBean);
//
// List<ResultSetMappingField> entityChildResultSetMappingFieldList = entityChildsResultSetMappingField.get(entityChildResultSetMappingField);
//
// for (ResultSetMappingField entityChildResultSetMapping : entityChildResultSetMappingFieldList) {
// Object columnValue = columnValues[entityChildResultSetMapping.getColumnIndex()];
//
// if (entityChildResultSetMapping.getConverter() != null && columnValue != null) {
// columnValue = entityChildResultSetMapping.getConverter().run(columnValue);
// entityChildResultSetMapping.getField().set(entityChildBean, columnValue);
// }
// }
//
// }
//
// values.add(bean);
// }
//
// return values;
}
private List<ResultSetMappingField> scanResultSetFields(Field[] fields, List<String> columnNames, HashMap<String, Type> sqlTypes) throws DataConverterNotFoundException {
final List<ResultSetMappingField> resultSetMappingFields = new ArrayList<>();
for (Field field : fields) {
field.setAccessible(true);
if (!field.isAnnotationPresent(SqlField.class) && !field.isAnnotationPresent(JsonProperty.class) && !field.isAnnotationPresent(MapToTable.class)) {
continue;
}
String columnName;
if (field.isAnnotationPresent(SqlField.class)) {
SqlField column = field.getAnnotation(SqlField.class);
columnName = !UtilityString.isNullOrEmpty(column.value()) ? column.value() : field.getName();
} else if (field.isAnnotationPresent(MapToTable.class)) {
columnName = field.getAnnotation(MapToTable.class).value();
} else if (field.isAnnotationPresent(JsonProperty.class)) {
columnName = field.getAnnotation(JsonProperty.class).value();
} else continue;
if (EntityBase.class.isAssignableFrom(field.getType()))
continue;
columnName = columnName.toLowerCase();
Type sqlType = sqlTypes.get(columnName);
Type entityType = field.getGenericType();
if (sqlType == null)
continue;
RunnableArgsWithReturn<Object, Object> converter;
if (field.isAnnotationPresent(Clob.class)) {
converter = data -> Base64.encodeBase64String(data.toString().getBytes());
} else
converter = SqlFieldHolder.getConverter(sqlType, entityType);
if (converter == null) {
if (UtilityDebug.isDebugExecution() || UtilityDebug.isIntegryServer())
throw new DataConverterNotFoundException(columnName, sqlType.getTypeName(), entityType.getTypeName());
logger.warn(String.format("Converter non trovato per colonna %s, sqlType %s -> dtoType %s", columnName, sqlType.getTypeName(), entityType.getTypeName()));
}
resultSetMappingFields.add(new ResultSetMappingField()
.setField(field)
.setColumnName(columnName)
.setFieldName(field.getName())
.setConverter(converter)
.setColumnIndex(columnNames.indexOf(columnName)));
}
return resultSetMappingFields;
}
}

View File

@@ -27,13 +27,13 @@ public class StbPublicationsDetail extends EntityBase {
private String entityName;
@SqlField(value = "active", defaultObjectValue="1", nullable = false)
private boolean active;
private Boolean active;
@SqlField(value = "export_history", defaultObjectValue="0", nullable = false)
private boolean exportHistory;
private Boolean exportHistory;
@SqlField(value = "ready_to_transmit", defaultObjectValue="0", nullable = false)
private boolean readyToTransmit;
private Boolean readyToTransmit;
@SqlField(value = "recalc_columns")
private String recalcColumnsField;
@@ -70,29 +70,29 @@ public class StbPublicationsDetail extends EntityBase {
return this;
}
public boolean isActive() {
public Boolean isActive() {
return active;
}
public StbPublicationsDetail setActive(boolean active) {
public StbPublicationsDetail setActive(Boolean active) {
this.active = active;
return this;
}
public boolean isExportHistory() {
public Boolean isExportHistory() {
return exportHistory;
}
public StbPublicationsDetail setExportHistory(boolean exportHistory) {
public StbPublicationsDetail setExportHistory(Boolean exportHistory) {
this.exportHistory = exportHistory;
return this;
}
public boolean isReadyToTransmit() {
public Boolean isReadyToTransmit() {
return readyToTransmit;
}
public StbPublicationsDetail setReadyToTransmit(boolean readyToTransmit) {
public StbPublicationsDetail setReadyToTransmit(Boolean readyToTransmit) {
this.readyToTransmit = readyToTransmit;
return this;
}

View File

@@ -1,11 +1,15 @@
package it.integry.ems_model.utility;
import com.annimon.stream.Stream;
import com.microsoft.sqlserver.jdbc.SQLServerResultSet;
import com.microsoft.sqlserver.jdbc.SQLServerStatement;
import it.integry.common.var.CommonConstants;
import it.integry.ems.expansion.RunnableArgs;
import it.integry.ems_model.annotation.EntityChild;
import it.integry.ems_model.base.EntityBase;
import it.integry.ems_model.base.EntityPropertyHolder;
import it.integry.ems_model.config.EmsRestConstants;
import it.integry.ems_model.db.AsyncResultSetMapper;
import it.integry.ems_model.db.ResultSetMapper;
import it.integry.ems_model.exception.DataConverterNotFoundException;
import it.integry.ems_model.types.OperationType;
@@ -529,6 +533,17 @@ public class UtilityDB {
return executeSimpleQueryDTO(conn, querySql, clazz, null);
}
public static <T> void executeSimpleQueryDTOAsync(Connection conn, String querySql, Class<T> clazz, RunnableArgs<T> onElementRetrieved) throws SQLException, DataConverterNotFoundException, InstantiationException, IllegalAccessException {
try (SQLServerStatement ps = (SQLServerStatement) conn.createStatement()) {
ps.setResponseBuffering("adaptive");
try (SQLServerResultSet rs = (SQLServerResultSet) ps.executeQuery(querySql)) {
new AsyncResultSetMapper()
.mapResultSetToList(rs, clazz, OperationType.SELECT_OBJECT, onElementRetrieved);
}
}
}
@Nullable
public static <T> List<T> executeSimpleQueryDTO(Connection conn, String querySql, Class<T> clazz, Integer timeout) throws SQLException, DataConverterNotFoundException, InstantiationException, IllegalAccessException {
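
A hedged usage sketch for the streaming helper added above, mirroring how AsyncHistoryManager appears to call it; the SQL, the entity class and the queue are illustrative, and RunnableArgs is assumed to be the project's one-argument callback interface (it is used as a lambda target elsewhere in this commit):

// Not self-contained: StbTransactionLog, UtilityDB, RunnableArgs and the
// connection come from the project; only the query text and the queue are made up.
java.sql.Connection conn = multiDBTransactionManager.getPrimaryConnection();
java.util.Queue<StbTransactionLog> rows = new java.util.concurrent.ConcurrentLinkedQueue<>();
UtilityDB.executeSimpleQueryDTOAsync(
        conn,
        "SELECT * FROM stb_transaction_log",
        StbTransactionLog.class,
        rows::add); // invoked once per mapped row while the adaptive-buffered result set is still streaming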

View File

@@ -44,7 +44,7 @@
<brt.tracknumber.listener.enabled>true</brt.tracknumber.listener.enabled>
<custom.object.mapper>jacksonResponseObjectMapper</custom.object.mapper>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<gson.version>2.5</gson.version>
<gson.version>2.9.1</gson.version>
<httpclient.version>4.3.6</httpclient.version>
</properties>