from pyspark.sql import SparkSession

# The important detail that is often omitted in the docs is the
# spark.jars.packages option.
# Be sure to change its value to match the version of the MongoDB Spark
# connector you are using.

# Connection settings — replace the placeholders with real values before running.
MONGO_INPUT_URI = "mongodb://127.0.0.1/<database>.<collection>"
MONGO_OUTPUT_URI = "mongodb://127.0.0.1/<database>.<collection>"
# Maven coordinate of the connector. The suffix encodes the Scala/connector
# versions (_2.11:2.4.0 targets Spark 2.4 built against Scala 2.11) — it must
# match the Spark distribution you run against.
MONGO_SPARK_CONNECTOR = "org.mongodb.spark:mongo-spark-connector_2.11:2.4.0"

# Build (or reuse) the session. All .config() calls — in particular
# spark.jars.packages, which triggers the dependency download — must happen
# before getOrCreate(); settings applied afterwards do not reach the JVM.
spark = (
    SparkSession.builder
    .appName("pysparktestapp")
    .config("spark.mongodb.input.uri", MONGO_INPUT_URI)
    .config("spark.mongodb.output.uri", MONGO_OUTPUT_URI)
    .config("spark.jars.packages", MONGO_SPARK_CONNECTOR)
    .getOrCreate()
)