You can use pattern.unapplySeq(string) inside a map to get a List of all the regex group matches. For example, if you have the string:
val str = "2018-04-11 06:27:36 localhost debug: localhost received discover from 0.0.0.0"
and you run:
pattern_1.unapplySeq(str)
you will get:
Option[List[String]] = Some(List(2018-04-11 06:27:36, localhost, debug, localhost, discover, 0.0.0.0))
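For reference, a pattern that would produce exactly that list could look like the sketch below; the group layout here is an assumption based on your sample line, not necessarily your exact pattern_1:

val pattern_1 = """(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) (\S+) (\w+): (\S+) received (\w+) from (\S+)""".r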
I used your example for this solution. This answer assumes that a given log line and its associated message type, content, and seconds are all printed with the same timestamp.
// case class definitions here
// regex pattern_1, pattern_2, pattern_3 defined here
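// For reference, a minimal sketch of what those definitions could look like.
// The case class fields follow the column names used below (kept as String,
// since everything comes straight from regex captures); pattern_1 is the one
// sketched above, and pattern_2 is an assumption about the msg-type/content/sec
// lines, not the exact regex from the question:
case class Rlog(dateTime: String, server_name: String, log_type: String,
                server_addr: String, action: String, target_addr: String,
                cost: String, msg_type: String, content: String)
case class Slog(dateTime: String, server_name: String, log_type: String,
                server_addr: String, action: String, target_addr: String,
                cost: String, msg_type: String, content: String)
val pattern_2 = """(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) (\S+) (\w+): (\S+): (.*)""".r
// pattern_3 would follow the same idea for the "sent" lines; its exact shape
// depends on how those lines look in your log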
// first() comes from sql.functions; spark.implicits._ (toDF, $"...", .as[...])
// is pre-imported in spark-shell but needed explicitly in a standalone app
import org.apache.spark.sql.functions.first
import spark.implicits._

val rdd = sc.textFile("file").cache
// Filter in 3 rdds based on the pattern that gets matched
val receivedRdd = rdd.filter(_.matches(pattern_1.toString)).map(pattern_1.unapplySeq(_).get)
val sentRdd = rdd.filter(_.matches(pattern_3.toString)).map(pattern_3.unapplySeq(_).get)
val otherRdd = rdd.filter(_.matches(pattern_2.toString)).map(pattern_2.unapplySeq(_).get)
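// Each element of receivedRdd / sentRdd is now the List of captured groups,
// e.g. List(2018-04-11 06:27:36, localhost, debug, localhost, discover, 0.0.0.0)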
// Convert it to a dataframe
// Names are matching with case class Rlog and Slog
// To facilitate the conversion to Datasets
val receivedDF = receivedRdd.map{ case List(a,b,c,d,e,f) => (a,b,c,d,e,f)}
.toDF("dateTime" , "server_name", "log_type", "server_addr", "action", "target_addr")
val sentDF = sentRdd.map{ case List(a,b,c,d,e,f) => (a,b,c,d,e,f)}
.toDF("dateTime" , "server_name", "log_type", "server_addr", "action", "target_addr")
// Convert multiple lines containing msg-type, content etc to single line using pivot
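// Before the pivot, the rows built from otherRdd look roughly like this
// (the exact i1 labels depend on your pattern_2 groups):
//   (2018-04-11 06:27:36, localhost, debug, sec,      0.4)
//   (2018-04-11 06:27:36, localhost, debug, Msg-Type, text)
//   (2018-04-11 06:27:36, localhost, debug, Content,  XXXXXXXXXX)
// pivot("i1") turns the distinct i1 values into columns, and first($"i2")
// picks the matching value for each of them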
val otherDF = otherRdd.map{ case List(ts , srvr, typ, i1 , i2) => (ts , srvr, typ, i1 , i2) }
.toDF("dateTime" , "server_name", "log_type", "i1" , "i2")
.groupBy("dateTime" , "server_name", "log_type")
.pivot("i1").agg(first($"i2") )
.select($"dateTime", $"server_name", $"log_type", $"sec".as("cost") , $"Msg-Type".as("msg_type"), $"Content".as("content"))
otherDF.show
//+-------------------+-----------+--------+----+--------+----------+
//| dateTime|server_name|log_type|cost|msg_type| content|
//+-------------------+-----------+--------+----+--------+----------+
//|2018-04-11 06:27:34| localhost| debug| 0.3| text|XXXXXXXXXX|
//|2018-04-11 06:27:36| localhost| debug| 0.4| text|XXXXXXXXXX|
//+-------------------+-----------+--------+----+--------+----------+
// Finally join based on dateTime, server_name and log_type and convert to Datasets
val RlogDS = receivedDF.join(otherDF, Seq("dateTime" , "server_name", "log_type")).as[Rlog]
val SlogDS = sentDF.join(otherDF, Seq("dateTime" , "server_name", "log_type")).as[Slog]
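// Both joins are inner joins on (dateTime, server_name, log_type), which is why
// a received/sent line and its msg-type/content/sec lines must share the same
// timestamp, as assumed above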
RlogDS.show(false)
//+-------------------+-----------+--------+-----------+--------+-----------+----+--------+----------+
//| dateTime|server_name|log_type|server_addr| action|target_addr|cost|msg_type| content|
//+-------------------+-----------+--------+-----------+--------+-----------+----+--------+----------+
//|2018-04-11 06:27:36| localhost| debug| localhost|discover| 0.0.0.0| 0.4| text|XXXXXXXXXX|
//+-------------------+-----------+--------+-----------+--------+-----------+----+--------+----------+
SlogDS.show(false)
//+-------------------+-----------+--------+-----------+--------+-----------+----+--------+----------+
//|dateTime |server_name|log_type|server_addr|action |target_addr|cost|msg_type|content |
//+-------------------+-----------+--------+-----------+--------+-----------+----+--------+----------+
//|2018-04-11 06:27:34|localhost |debug |localhost |response|0.0.0.0 |0.3 |text |XXXXXXXXXX|
//+-------------------+-----------+--------+-----------+--------+-----------+----+--------+----------+
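Once you have the typed Datasets you can work with the fields directly, for example (hypothetical usage, assuming the Rlog sketch above):

RlogDS.filter(_.action == "discover").map(r => (r.dateTime, r.cost)).show(false)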