У нас есть требование выполнять массовую запись в Elasticsearch. Мы хотели бы узнать, существует ли лучший способ пакетирования данных и как избежать потери данных при выполнении пакетной записи.
/**
 * Polls Kafka for JSON messages, buffers them, and bulk-writes the buffer to
 * Elasticsearch via {@code writerService} once it reaches the flush threshold
 * (1000+ records) or the consumer has seen enough empty polls with a non-empty
 * buffer ({@code lastSeenZeroPollCounter >= 3000}).
 *
 * <p>At-least-once delivery: offsets are committed ONLY after the buffered
 * batch has been handed to the writer. The original code committed on every
 * iteration, which lost any still-buffered records if the process died before
 * the flush. Duplicates on redelivery are possible and must be tolerated
 * downstream (e.g. idempotent document IDs in Elasticsearch).
 */
public void consume() {
    logger.debug("raw consume......");
    String topic = "json.incoming";
    String consGroup = "rConsumerGroup";
    Properties props = new Properties();
    props.put("enable.auto.commit", "false"); // manual commit so we control when offsets advance
    props.put("session.timeout.ms", "20000");
    props.put("max.poll.records", "10000");
    consumer = new GenericConsumer<String, JsonNode>().initialize(topic, consGroup, STREAMSERDE.STRINGDESER, STREAMSERDE.JSONDESER, props);
    logger.debug("Kafka Consumer Initialized......");
    buffer = new ArrayList<MessageVO>();
    // ObjectMapper is thread-safe and expensive to construct; create it once
    // instead of once per record (the original allocated one inside the loop).
    ObjectMapper objectMapper = new ObjectMapper();
    while (true) {
        try {
            ConsumerRecords<String, JsonNode> records = consumer.poll(100);
            long startTimeMillis = System.currentTimeMillis();
            if (records.count() == 0 && !buffer.isEmpty()) {
                // Count consecutive empty polls so a partially filled buffer
                // is eventually flushed even when traffic stops.
                lastSeenZeroPollCounter++;
            }
            if (records.count() > 0) {
                logger.debug(">>records count = " + records.count());
                for (ConsumerRecord<String, JsonNode> record : records) {
                    logger.debug("record.offset() = " + record.offset() + " : record.key() = " + record.key());
                    JsonNode jsonMessage = record.value();
                    logger.debug("incoming Message = " + jsonMessage);
                    MessageVO rawMessage = objectMapper.convertValue(jsonMessage, MessageVO.class);
                    logger.info("Size of the buffer is " + buffer.size());
                    buffer.add(rawMessage);
                }
                long durationInMilliSec = System.currentTimeMillis() - startTimeMillis;
                logger.debug("Number of Records:: " + records.count() + " Time took to process poll :: " + durationInMilliSec);
            }
            if ((buffer.size() >= 1000 && buffer.size() <= 3000) || (buffer.size() > 0 && lastSeenZeroPollCounter >= 3000)) {
                lastSeenZeroPollCounter = 0;
                // Snapshot the buffer so clearing it below cannot empty the
                // list handed to the writer. (Original declared this as
                // List<RawSyslogMessageVO>, which did not even compile against
                // deepCopy's List<MessageVO> return type.)
                List<MessageVO> clonedBuffer = deepCopy(buffer);
                logger.info("The size of clonedBuffer is ::: " + clonedBuffer.size());
                writerService.writeRaw(clonedBuffer);
                buffer.clear();
                // Commit only after the batch reached the writer: if we crash
                // before this point the records are re-polled, not lost.
                consumer.commitSync();
            }
        } catch (Exception e) {
            // Catch Exception (not Throwable — Errors like OOM should propagate)
            // and rely on the logger instead of printStackTrace().
            logger.error("Error occured while processing message", e);
        }
    }
}
Код, клонирующий буфер перед очисткой, чтобы избежать потери данных:
/**
 * Returns an independent, unmodifiable snapshot of {@code messages} so the
 * caller can safely {@code clear()} the original buffer afterwards.
 *
 * <p>Bug fixed: the original returned {@code Collections.unmodifiableList(messages)}
 * — an unmodifiable <em>view of the original list</em>, not the copy it had just
 * built. Clearing the buffer therefore also emptied the "clone" passed to the
 * writer, which is precisely the data loss being investigated. It also logged
 * the copy's size before adding anything (always 0).
 *
 * <p>NOTE: this is a shallow copy — the list is new but the element objects are
 * shared. That is sufficient here because the elements are not mutated after
 * buffering. Generalized to {@code <T>} (backward-compatible for MessageVO callers).
 *
 * @param messages the buffer to snapshot; not modified
 * @return an unmodifiable copy containing the same elements
 */
private <T> List<T> deepCopy(List<T> messages) {
    List<T> snapshot = new ArrayList<>(messages);
    logger.debug("deepCopy :: snapshot size ::: " + snapshot.size());
    return Collections.unmodifiableList(snapshot);
}
Любая помощь приветствуется. Спасибо.