在配置jupyter笔记本时收到相同的错误消息,有人尝试过CDH上的Spark2 环保:CDH 5.15,Spark 2.3
{
"display_name": "PySpark/Python3 (Anaconda)",
"language": "python",
"argv": [
"/usr/bin/python3.6",
"-m",
"ipykernel",
"-f",
"{connection_file}"
],
"env": {
"HADOOP_CONF_DIR":"/etc/hadoop/conf",
"PYSPARK_PYTHON":"/usr/bin/python3.6",
"PYSPARK_DRIVER_PYTHON":"jupyter",
"SPARK_HOME": "/opt/cloudera/parcels/CDH/lib/spark",
"spark.yarn.appMasterEnv.PYSPARK_PYTHON":"/usr/bin/python3.6",
"spark.yarn.appMasterEnv.PYSPARK_DRIVER_PYTHON":"/usr/bin/python3.6",
"PYTHONPATH": "/opt/cloudera/parcels/CDH/lib/spark:/opt/cloudera/parcels/SPARK2/lib/spark2/jars/py4j-0.10.7.jar"
}
}