Got java.lang.ArrayIndexOutOfBoundsException: 42 when inserting into Hive from a DataFrame-registered temp table
0 votes
/ 25 November 2018

Here is my code:

{
  // Register the DataFrame as a temp view, then insert into the Hive table
  var temptablename = "temp" + tableshotname
  dffiles.createOrReplaceTempView(temptablename)
  sparkSession.sql("INSERT INTO TABLE " + database + "." + tablename +
    " SELECT " + schemaString + ", current_timestamp() FROM " + temptablename)
}
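
Side note: schemaString should list exactly the columns that exist in the temp view. A quick check, assuming schemaString is a plain comma-separated column list:

    // Hypothetical sanity check: compare the column list against the DataFrame width.
    val listed = schemaString.split(",").map(_.trim)
    println(s"schemaString lists ${listed.length} columns, dffiles has ${dffiles.schema.length}")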

With the code above, I got the error message below:

...
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt(37)) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 37, fw), StringType), true, false) AS fw#95
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt(38)) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 38, shippingtype), StringType), true, false) AS shippingtype#96
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt(39)) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 39, moconfigid), StringType), true, false) AS moconfigid#97
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt(40)) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 40, inputdate), StringType), true, false) AS inputdate#98
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt(41)) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 41, mtevendorplant), StringType), true, false) AS mtevendorplant#99
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt(42)) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 42, mtevendorsystem), StringType), true, false) AS mtevendorsystem#100
    if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt(43)) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 43, mtefilename), StringType), true, false) AS mtefilename#101
        at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:291)
        at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:589)
        at org.apache.spark.sql.SparkSession$$anonfun$4.apply(SparkSession.scala:589)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
        at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.agg_doAggregateWithoutKey$(Unknown Source)
        at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source)
        at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
        at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
        at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125)
        at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
        at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
        at org.apache.spark.scheduler.Task.run(Task.scala:109)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
    Caused by: java.lang.ArrayIndexOutOfBoundsException: 42
        at org.apache.spark.sql.catalyst.expressions.GenericRow.get(rows.scala:174)
        at org.apache.spark.sql.Row$class.isNullAt(Row.scala:191)
        at org.apache.spark.sql.catalyst.expressions.GenericRow.isNullAt(rows.scala:166)
        at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.writeFields_21$(Unknown Source)
        at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
        at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:288)
        ... 17 more
    ...
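
The Caused by part looks like the key: GenericRow.get is asked for index 42 on a row that only holds 42 values (valid indices 0 to 41), while the expression dump above shows the encoder reading fields up to index 43 (mtefilename). So some rows in the DataFrame appear to be shorter than the declared schema. A diagnostic sketch, run against the raw RDD[Row] the DataFrame was built from (rowRDD is a hypothetical name) so it does not go through the failing encoder:

    // Tally rows by their actual field count; any width below the schema's
    // (44 fields here, indices 0..43) reproduces this ArrayIndexOutOfBoundsException.
    val widths = rowRDD.map(_.length).countByValue()
    widths.toSeq.sortBy(_._1).foreach { case (w, n) => println(s"$n row(s) with $w field(s)") }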

Here are the other statements I tried, all with the same exception:

{
  sparkSession.sql("INSERT INTO TABLE " + database + "." + tablename +
    " SELECT " + schemaString + ", current_timestamp() FROM " + temptablename)
  dffiles.write.mode(saveMode = "Append").saveAsTable("Temp." + tablename)
  sparkSession.sql("create table Temp." + tablename +
    " as SELECT *, current_timestamp AS timestamp FROM " + temptablename)
  println("Inserted rows")
}
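
For comparison, the same append can also go through the writer API instead of a string-built INSERT. A sketch (the load_ts column name is just illustrative); note that insertInto matches columns by position, and it should still fail the same way if the rows really are shorter than the schema:

    import org.apache.spark.sql.functions.current_timestamp

    dffiles
      .withColumn("load_ts", current_timestamp())
      .write
      .mode("append")
      .insertInto(database + "." + tablename)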

I tried several of the other suggested answers: I increased the Tez container settings and tried switching the execution engine to MR. In addition, the code below tries to replace the null values in the DataFrame:

{
  // Imports needed for input_file_name and the $"..." column syntax
  import org.apache.spark.sql.functions.input_file_name
  import sparkSession.implicits._

  val rddfiles =
    sparkSession.read
      .option("header", "true")
      .option("inferschema", "true")
      .option("nullValue", " ")
      .textFile(inputPath)
      .select(input_file_name(), $"value")
      .as[(String, String)] // optionally convert to a Dataset
      .rdd
}
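
As far as I understand, textFile ignores the header, inferschema and nullValue options and just returns one string column per line. If the input is actually delimited, the CSV reader does apply them, and its standard mode option can drop or fail on rows whose field count does not match the schema. A sketch, assuming comma-delimited files:

    // DROPMALFORMED silently discards rows with the wrong number of fields;
    // FAILFAST would instead raise an error naming the offending record.
    val dfcsv = sparkSession.read
      .option("header", "true")
      .option("inferSchema", "true")
      .option("nullValue", " ")
      .option("mode", "DROPMALFORMED")
      .csv(inputPath)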

OR

{ val newdf = dffiles.na.fill("'N/A'") } // fills string-column nulls with the literal 'N/A' (quotes included)

The statement above threw the same exception as well.
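
In hindsight that makes sense: na.fill only replaces nulls in cells that already exist; it cannot append fields to rows that are physically shorter than the schema. If short rows are the cause, they would have to be padded before createDataFrame. A sketch, where the '|' delimiter and the width of 44 are assumptions:

    import org.apache.spark.sql.Row

    val expectedWidth = 44 // number of columns in the target schema (assumed)
    val paddedRows = sparkSession.sparkContext
      .textFile(inputPath)
      .map { line =>
        val parts = line.split("\\|", -1) // -1 keeps trailing empty fields
        Row.fromSeq(parts.toSeq.padTo(expectedWidth, null))
      }
    val fixedDf = sparkSession.createDataFrame(paddedRows, dffiles.schema)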

Does anyone have any ideas?
