More examples are available on GitHub.
# Metrre ceci dans la première cellule quand vous voulez utiliser spark dans un notebook
!wget -q https://mirrors.netix.net/apache/spark/spark-3.1.1/spark-3.1.1-bin-hadoop3.2.tgz
!tar -xzf spark-3.1.1-bin-hadoop3.2.tgz
!pip install -q findspark
# Configure the environment variables Spark needs, directly from Python
# via the os module (JAVA_HOME for the JVM, SPARK_HOME for the unpacked
# Spark distribution extracted above).
import os

os.environ.update({
    "JAVA_HOME": "/usr/lib/jvm/default-java",
    "SPARK_HOME": "/content/spark-3.1.1-bin-hadoop3.2",
})

# findspark adds the SPARK_HOME python libs to sys.path so that
# `import pyspark` works in this notebook kernel.
import findspark

findspark.init()
from pyspark.sql import SparkSession

# Build (or reuse) a local SparkSession using every available core;
# keep a handle on the underlying SparkContext for RDD-level APIs.
builder = SparkSession.builder.master("local[*]").appName("BDML")
spark = builder.getOrCreate()
sc = spark.sparkContext