Вам нужно выполнить groupBy, получить количество строк (count) и присоединить (join) результат к вашему исходному фрейму данных
// Step 1: build a sample DataFrame with two string columns, Network and Station.
scala> val df = Seq(("XMN", "DIS"), ("XMN", "CNN"), ("XMN", "JFK"), ("ALK", "DIS"), ("ALK", "CNN")).toDF("Network", "Station")
df: org.apache.spark.sql.DataFrame = [Network: string, Station: string]
scala> df.show
+-------+-------+
|Network|Station|
+-------+-------+
| XMN| DIS|
| XMN| CNN|
| XMN| JFK|
| ALK| DIS|
| ALK| CNN|
+-------+-------+
// Step 2: aggregate — one row per Network with the number of rows in that group.
// groupBy(...).count produces a new column named "count" of type bigint.
scala> val grpCountDF = df.groupBy("Network").count
grpCountDF: org.apache.spark.sql.DataFrame = [Network: string, count: bigint]
scala> grpCountDF.show
+-------+-----+
|Network|count|
+-------+-----+
| XMN| 3|
| ALK| 2|
+-------+-----+
// Step 3: equi-join back to the original DataFrame on "Network" (inner join by
// default; the single-usingColumn form keeps only one "Network" column in the
// result). Every original row now carries its group's count.
// NOTE(review): the same result can be had without a join via a window function:
//   df.withColumn("count", count("*").over(Window.partitionBy("Network")))
scala> val outputDF = df.join(grpCountDF, "Network")
outputDF: org.apache.spark.sql.DataFrame = [Network: string, Station: string ... 1 more field]
scala> outputDF.show
+-------+-------+-----+
|Network|Station|count|
+-------+-------+-----+
| XMN| DIS| 3|
| XMN| CNN| 3|
| XMN| JFK| 3|
| ALK| DIS| 2|
| ALK| CNN| 2|
+-------+-------+-----+