You can use struct. Here is an example:
import spark.implicits._
import org.apache.spark.sql.functions.{col, expr}

val df = Seq(("a", 1), ("b", 2), ("b", 1)).toDF("type", "count")
  .withColumn("description", expr("case when count > 0 then struct('gain' as scale, 'positive' as category) else struct('loss' as scale, 'negative' as category) end"))
  .select(col("type"), col("count"), col("description.scale").as("scale"), col("description.category").as("category"))

df.show()
+----+-----+-----+--------+
|type|count|scale|category|
+----+-----+-----+--------+
| a| 1| gain|positive|
| b| 2| gain|positive|
| b| 1| gain|positive|
+----+-----+-----+--------+
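If you prefer to avoid a SQL expression string, the same result can be built with the Column API (`when`/`otherwise` plus `struct` and `lit`). Below is a minimal sketch under the same schema; the extra ("c", -1) row is only there to exercise the else branch:

import org.apache.spark.sql.functions.{when, struct, lit, col}

val df2 = Seq(("a", 1), ("b", 2), ("c", -1)).toDF("type", "count")
  .withColumn("description",
    when(col("count") > 0, struct(lit("gain").as("scale"), lit("positive").as("category")))
      .otherwise(struct(lit("loss").as("scale"), lit("negative").as("category"))))
  // flatten the struct fields back into top-level columns
  .select(col("type"), col("count"), col("description.scale").as("scale"), col("description.category").as("category"))

df2.show()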