ValueError Traceback (most recent call last)
<ipython-input-53-03432bbf269d> in <module>
----> 1 tr_df.head(5)
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/dataframe.py
in head(self, n)   1250             rs = self.head(1)   1251             return rs[0] if rs else None -> 1252         return self.take(n)   1253    1254     @ignore_unicode_prefix
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/dataframe.py
in take(self, num)    569         [Row(age=2, name=u'Alice'), Row(age=5, name=u'Bob')]    570         """ --> 571         return self.limit(num).collect()    572    573     @since(1.3)
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/dataframe.py
in collect(self)    532         with SCCallSiteSync(self._sc) as css:    533             sock_info = self._jdf.collectToPython() --> 534         return list(_load_from_socket(sock_info, BatchedSerializer(PickleSerializer())))    535    536     @ignore_unicode_prefix
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/serializers.py
in load_stream(self, stream)    145         while True:    146             try: --> 147                 yield self._read_with_length(stream)    148             except EOFError:    149                 return
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/serializers.py
in _read_with_length(self, stream)    170         if len(obj) < length:    171             raise EOFError --> 172         return self.loads(obj)    173    174     def dumps(self, obj):
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/serializers.py
in loads(self, obj, encoding)    578 if sys.version >= '3':    579     def loads(self, obj, encoding="bytes"): --> 580         return pickle.loads(obj, encoding=encoding)    581 else:    582     def loads(self, obj, encoding=None):
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/types.py
in _parse_datatype_json_string(json_string)    867     >>> check_datatype(complex_maptype)    868     """ --> 869     return _parse_datatype_json_value(json.loads(json_string))    870    871
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/types.py
in _parse_datatype_json_value(json_value)    884         tpe = json_value["type"]    885         if tpe in _all_complex_types: --> 886             return _all_complex_types[tpe].fromJson(json_value)    887         elif tpe == 'udt':    888             return UserDefinedType.fromJson(json_value)
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/types.py
in fromJson(cls, json)    575     @classmethod    576     def fromJson(cls, json): --> 577         return StructType([StructField.fromJson(f) for f in json["fields"]])    578    579     def fieldNames(self):
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/types.py
in &lt;listcomp&gt;(.0)    575     @classmethod    576     def fromJson(cls, json): --> 577         return StructType([StructField.fromJson(f) for f in json["fields"]])    578    579     def fieldNames(self):
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/types.py
in fromJson(cls, json)    432     def fromJson(cls, json):    433         return StructField(json["name"], --> 434                            _parse_datatype_json_value(json["type"]),    435                            json["nullable"],    436                            json["metadata"])
~/anaconda3/envs/naboo-env/lib/python3.6/site-packages/pyspark/sql/types.py
in _parse_datatype_json_value(json_value)    880             return DecimalType(int(m.group(1)), int(m.group(2)))    881         else: --> 882             raise ValueError("Could not parse datatype: %s" % json_value)    883     else:    884         tpe = json_value["type"]
ValueError: Could not parse datatype: decimal(17,-24)