UDF:
import scala.collection.mutable.ListBuffer

// For each '1' in the bitmask `value`, emit a date string "year-month-day",
// where the character's 1-based position is the day of the month.
// The month number comes from the column name: "Mon01" -> "01".
val myudf = (month: String, year: String, value: String) => {
  val monthNum = month.replaceAll("[A-Za-z]+", "")
  val dateList = ListBuffer[String]()
  var day = 1
  for (char <- value) {
    if (char == '1') dateList += s"$year-$monthNum-$day"
    day += 1
  }
  dateList.toList
}
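Since myudf is a plain Scala function, you can sanity-check it without Spark first (an illustrative check only; the expected result matches the Date_Mon01 column shown further down):

println(myudf("Mon01", "2018", "01110"))
// List(2018-01-2, 2018-01-3, 2018-01-4)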
// Main method
val data = spark.read.option("header", "true").csv("data.csv")
data.show()
+----+-----+-----+
|Year|Mon01|Mon02|
+----+-----+-----+
|2018|01110|00111|
|2019|01100|00001|
+----+-----+-----+
import org.apache.spark.sql.functions.{col, lit, udf}

val myCustomUdf = udf(myudf)
val monthCols = data.columns.drop(1)  // every column except "Year"
val requiredDF = monthCols.foldLeft(data) {
  case (df, month) =>
    df.withColumn("Date_" + month, myCustomUdf(lit(month), col("Year"), col(month)))
}
requiredDF.show(false)
+----+-----+-----+---------------------------------+---------------------------------+
|Year|Mon01|Mon02|Date_Mon01 |Date_Mon02 |
+----+-----+-----+---------------------------------+---------------------------------+
|2018|01110|00111|[2018-01-2, 2018-01-3, 2018-01-4]|[2018-02-3, 2018-02-4, 2018-02-5]|
|2019|01100|00001|[2019-01-2, 2019-01-3] |[2019-02-5] |
+----+-----+-----+---------------------------------+---------------------------------+
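Note that the day part is not zero-padded ("2018-01-2" rather than "2018-01-02"), so these strings will not parse with to_date and the usual "yyyy-MM-dd" pattern. If you need real dates, one possible tweak (my assumption, not part of the question) is to build the string with an f interpolator inside the UDF:

// Hypothetical padded variant of the date string; with the same
// year/monthNum/day values as in myudf, "2018-01-02" then parses via
// to_date(col("date"), "yyyy-MM-dd").
def dateString(year: String, monthNum: String, day: Int): String =
  f"$year-$monthNum-$day%02d"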
Hope this helps.