from pyspark.sql import SparkSession
# Build (or reuse) a SparkSession preconfigured with MongoDB input/output URIs
# pointing at the `testtt.coll` collection on a local mongod instance.
# NOTE: this requires the mongo-spark-connector package on the Spark classpath
# (e.g. --packages org.mongodb.spark:mongo-spark-connector_2.12:x.y.z).
my_spark = SparkSession \
    .builder \
    .appName("myApp") \
    .config("spark.mongodb.input.uri", "mongodb://127.0.0.1/testtt.coll") \
    .config("spark.mongodb.output.uri", "mongodb://127.0.0.1/testtt.coll") \
    .getOrCreate()

# Read the CSV with a header row, letting Spark infer column types.
# BUG FIX: original referenced undefined name `spark`; the session above is
# bound to `my_spark`, so the script raised NameError before doing any work.
df = my_spark.read.csv(
    path="file:///home/user/Desktop/testtt.csv",
    header=True,
    inferSchema=True,
)

# Append the DataFrame rows into MongoDB database `testtt`, collection `coll`.
# The connection URI comes from `spark.mongodb.output.uri` set on the session.
df.write \
    .format("com.mongodb.spark.sql.DefaultSource") \
    .mode("append") \
    .option("database", "testtt") \
    .option("collection", "coll") \
    .save()