    at ...(ParquetTableSupport.scala:220)
    at org.apache.spark.sql.parquet.RowWriteSupport.writeValue(ParquetTableSupport.scala:192)
    at parquet.hadoop.ParquetRecordWriter.write(ParquetRecordWriter.java:81)
    at parquet.hadoop.ParquetRecordWriter.write(ParquetRecordWriter.java:37)
    at org.apache.spark.sql.parquet.ParquetRelation2.org$apache$spark$sql$parquet$ParquetRelation2$writeShard$1(newParquet.scala:671)
    at org.apache.spark.sql.parquet.ParquetRelation2$anonfun$insert$2.apply(newParquet.scala:689)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
    at org.apache.spark.scheduler.Task.run(Task.scala:64)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
How can I fix this problem?
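
In case it helps, below is a rough sketch of the kind of job that ends up in this code path, together with a workaround I am considering but have not verified: casting the Scala BigDecimal column (which Spark SQL infers as an unlimited-precision decimal) to an explicit, bounded DecimalType before writing. Everything in the sketch is illustrative rather than my actual code: the Payment case class, the id/amount columns, the (18, 2) precision and the output paths are placeholders, and it assumes Spark 1.3.x running in spark-shell, where `sc` is already defined.

import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.types.DecimalType

// Placeholder schema: a scala.math.BigDecimal field is inferred by Spark SQL
// as an unlimited-precision DecimalType.
case class Payment(id: Long, amount: BigDecimal)

// `sc` is the SparkContext provided by spark-shell (Spark 1.3.x assumed).
val sqlContext = new SQLContext(sc)
import sqlContext.implicits._

val df = sc.parallelize(Seq(Payment(1L, BigDecimal("12.34")))).toDF()

// A plain write like the commented-out line below appears to be what reaches
// the ParquetRelation2 / RowWriteSupport frames shown in the trace above:
//   df.saveAsParquetFile("/tmp/payments.parquet")

// Candidate workaround (untested): give the decimal column an explicit,
// bounded precision and scale so the Parquet writer sees a fixed-precision type.
val bounded = df.select(col("id"), col("amount").cast(DecimalType(18, 2)).as("amount"))
bounded.saveAsParquetFile("/tmp/payments_bounded.parquet")

The idea behind the cast is simply that the unlimited-precision decimal inferred from scala.math.BigDecimal seems to be what the 1.3.x Parquet writer stumbles on, but I may be wrong about that.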
--
View this message in context:
http://apache-spark-user-list.1001560.n3.nabble.com/BigDecimal-problem-in-parquet-file-tp23221.html
Sent from the Apache Spark User List mailing list archive at Nabble.com.