Думаю, этого можно достичь без UDF —
With UDF
// With a UDF: extract each octet of the 32-bit address and join them with dots.
// A single UDF parameterised by the shift amount replaces the four
// near-identical one-off UDFs (DRY), and vals/params use lowerCamelCase
// per Scala convention.
val data = spark.range(2).withColumn("Ip", lit(10))

// Octet extractor: shift the address right by `bits`, keep the low byte.
val octetUdf = udf((ip: Long, bits: Int) => (ip >> bits) & 255)

val withOctets = data
  .withColumn("bitwise 1", octetUdf(col("Ip"), lit(24)))
  .withColumn("bitwise 2", octetUdf(col("Ip"), lit(16)))
  .withColumn("bitwise 3", octetUdf(col("Ip"), lit(8)))
  .withColumn("bitwise 4", octetUdf(col("Ip"), lit(0)))

// Assemble the dotted-quad string, then drop the intermediate octet columns.
val finalDf = withOctets
  .withColumn("FinalIp",
    concat_ws(".", col("bitwise 1"), col("bitwise 2"), col("bitwise 3"), col("bitwise 4")))
  .drop("bitwise 1", "bitwise 2", "bitwise 3", "bitwise 4")

finalDf.show(false)
/**
* +---+---+--------+
* |id |Ip |FinalIp |
* +---+---+--------+
* |0 |10 |0.0.0.10|
* |1 |10 |0.0.0.10|
* +---+---+--------+
*/
Без UDF
// Without a UDF: the whole dotted-quad address is built by one SQL
// expression — shift/mask each octet and join with concat_ws('.', ...).
spark.range(2).withColumn("Ip", lit(10))
  .withColumn("FinalIp", expr(
    "concat_ws('.', shiftRight(Ip, 24) & 255, shiftRight(Ip, 16) & 255, " +
      "shiftRight(Ip, 8) & 255, Ip & 255)"))
  .show(false)
/**
* +---+---+--------+
* |id |Ip |FinalIp |
* +---+---+--------+
* |0 |10 |0.0.0.10|
* |1 |10 |0.0.0.10|
* +---+---+--------+
*/