I am streaming real-time data from Kafka, but the data is in Avro format, so it cannot be deserialized as JSON. I am using the Kafka Streams low-level Processor API. How do I deserialize an Avro record?
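From what I have read, decoding a single Avro value with the plain Apache Avro API might look roughly like the sketch below (the schema literal, the field names, and the AvroDecoding helper name are my own assumptions for illustration), but I am not sure how to fit this into the Processor API:

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.apache.avro.io.DecoderFactory

object AvroDecoding {
  // hypothetical writer schema; the real one comes from wherever the producer defines it
  val schema: Schema = new Schema.Parser().parse(
    """{"type":"record","name":"Order","fields":[{"name":"id","type":"string"},{"name":"amount","type":"double"}]}"""
  )
  private val reader = new GenericDatumReader[GenericRecord](schema)

  // turn the raw bytes of one Kafka message value into a GenericRecord
  def decode(bytes: Array[Byte]): GenericRecord = {
    val decoder = DecoderFactory.get().binaryDecoder(bytes, null)
    reader.read(null, decoder)
  }
}

I am also aware that if the values were produced with Confluent's schema-registry serializer, they carry a magic byte and a 4-byte schema id before the Avro payload, so a KafkaAvroDeserializer (or an Avro Serde) would be needed instead of this plain decoder.

My topology is built like this: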
def orderStreamData(builder: KStreamBuilder, inTopic: String, outTopic: String): TopologyBuilder = {
  builder
    // add the source topic; stringDe is a String deserializer defined elsewhere
    .addSource("source1", stringDe, stringDe, inTopic)
    // add the processor class via a ProcessorSupplier
    .addProcessor("order", new ProcessorSupplier[String, String] {
      override def get(): Processor[String, String] = new ProcessorImpl
    }, "source1")
    // add a local state store for stateful operations
    .addStateStore(Stores.create("tester").withStringKeys.withStringValues.inMemory.build, "order")
    // add the destination topic for the processed data; stringSer is a String serializer
    .addSink("sink", outTopic, stringSer, stringSer, "order")
}
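One idea I am considering (just a sketch, not working code) is to read the value as raw bytes at the source so the processor can do the Avro decoding itself; byteArrayDe below stands for Serdes.ByteArray().deserializer(), and AvroProcessorImpl is a hypothetical processor shown further down:

import org.apache.kafka.common.serialization.Serdes

val byteArrayDe = Serdes.ByteArray().deserializer()

builder
  .addSource("source1", stringDe, byteArrayDe, inTopic) // value now arrives as raw Avro bytes
  .addProcessor("order", new ProcessorSupplier[String, Array[Byte]] {
    override def get(): Processor[String, Array[Byte]] = new AvroProcessorImpl // hypothetical
  }, "source1")

Is that the right direction, or is there a proper Avro deserializer I should plug into addSource directly?

My current processor implementation: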
class ProcessorImpl extends AbstractProcessor[String, String] {
  var keyValueStore: KeyValueStore[String, String] = _
  var processorContext: ProcessorContext = _

  override def init(context: ProcessorContext): Unit = {
    processorContext = context
    processorContext.schedule(10000L)
    keyValueStore = processorContext.getStateStore("tester").asInstanceOf[KeyValueStore[String, String]]
    Objects.requireNonNull(keyValueStore, "State store can't be null")
  }

  /**
   * The logic is implemented here:
   * every value for a key must be greater than the previous value.
   */
  override def process(key: String, value: String): Unit = {
    // access the local state store for the last value saved for this key
  }
}
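And this is the hypothetical processor that would do the decoding itself, assuming the byte-array source wiring and the AvroDecoding helper sketched above:

class AvroProcessorImpl extends AbstractProcessor[String, Array[Byte]] {
  override def process(key: String, value: Array[Byte]): Unit = {
    // decode the raw Avro bytes into a GenericRecord before applying the ordering logic
    val record: GenericRecord = AvroDecoding.decode(value)
    // "amount" is only an assumption taken from the sketch schema above
    val amount = record.get("amount")
    // ... compare against the last value stored for this key, then forward downstream as a String
    context().forward(key, record.toString)
  }
}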