我使用以下方式初始化 PySpark,并且正在使用 Python 3。
# Initialize Spark for Python 3 via findspark.
import findspark

# findspark.init must run before pyspark can be imported,
# so the Spark installation is added to sys.path first.
findspark.init('/home/ubuntu/spark-2.1.1-bin-hadoop2.7')

import pyspark
from pyspark.sql import SparkSession

# Create (or reuse) the session for this application.
builder = SparkSession.builder
spark = builder.appName('basics').getOrCreate()
问题是数据的某些列无法正常工作。例如,当我尝试调用 data.describe().show() 时,它显示了一个错误:
---------------------------------------------------------------------------
Py4JJavaError Traceback (most recent call last)
<ipython-input-162-dde026cdb0e0> in <module>()
----> 1 df.describe().show()
~/spark-2.1.1-bin-hadoop2.7/python/pyspark/sql/dataframe.py in describe(self, *cols)
900 if len(cols) == 1 and isinstance(cols[0], list):
901 cols = cols[0]
--> 902 jdf = self._jdf.describe(self._jseq(cols))
903 return DataFrame(jdf, self.sql_ctx)
904
~/spark-2.1.1-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py in __call__(self, *args)
1131 answer = self.gateway_client.send_command(command)
1132 return_value = get_return_value(
-> 1133 answer, self.gateway_client, self.target_id, self.name)
1134
1135 for temp_arg in temp_args:
~/spark-2.1.1-bin-hadoop2.7/python/pyspark/sql/utils.py in deco(*a, **kw)
61 def deco(*a, **kw):
62 try:
---> 63 return f(*a, **kw)
64 except py4j.protocol.Py4JJavaError as e:
65 s = e.java_exception.toString()
~/spark-2.1.1-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
317 raise Py4JJavaError(
318 "An error occurred while calling {0}{1}{2}.\n".
--> 319 format(target_id, ".", name), value)
320 else:
321 raise Py4JError(
Py4JJavaError: An error occurred while calling o3496.describe.
: java.util.NoSuchElementException: None.get
at scala.None$.get(Option.scala:347)
at scala.None$.get(Option.scala:345)
at ...
有人知道这里出了什么问题吗?