![](/img/trans.png)
[英]Spring forms: “Type [java.lang.String] is not valid for option items”
[英]Getting error like java.lang.String is not a valid external type for schema of double In below code
我的代碼如下所示:
object DataTypeValidation extends Logging {
def main(args: Array[String]) {
val spark = SparkSession.builder()
.appName("SparkProjectforDataTypeValidation")
.master("local")
.getOrCreate();
spark.sparkContext.setLogLevel("ERROR")
try {
breakable {
val format = new SimpleDateFormat("d-M-y hh:mm:ss.SSSSS")
println("*********Data Type Validation Started*************** " + format.format(Calendar.getInstance().getTime()))
val data = Seq(Row(873131558, "ABC22"), Row(29000000, 99.00), Row(27000000, 2.34))
val schema = StructType(Array(
StructField("oldcl", IntegerType, nullable = true),
StructField("newcl", DoubleType, nullable = true))
)
val ONE = 1
var erroredRecordRow = new scala.collection.mutable.ListBuffer[Row]()
val newSchema = schema.fields.map({
case StructField(name, _: IntegerType, nullorNotnull, _) => StructField(name, StringType, nullorNotnull)
case StructField(name, _: DoubleType, nullorNotnull, _) => StructField(name, StringType, nullorNotnull)
case fields => fields
}).dropRight(ONE)
val newStructType = StructType { newSchema }
val df = spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
df.show()
print(df.schema)
}
} catch {
case exception: Exception =>
println("exception caught in Data Type Mismatch In Schema Validation: " + exception.toString())
exception.printStackTrace();
}
spark.stop()
}
}
exception caught in Data Type Mismatch In Schema Validation: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: java.lang.String is not a valid external type for schema of double
if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 0, oldcl), IntegerType) AS oldcl#0
if (assertnotnull(input[0, org.apache.spark.sql.Row, true]).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true]), 1, newcl), DoubleType) AS newcl#1
at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:292)
@AnkitTomar,此錯誤是由於字符串值ABC22
映射到DoubleType
。
請更新以下行
val data = Seq(Row(873131558, "ABC22"), Row(29000000, 99.00), Row(27000000, 2.34))
val schema = StructType(Array(
StructField("oldcl", IntegerType, nullable = true),
StructField("newcl", DoubleType, nullable = true))
)
和
val data = Seq(Row(873131558, "ABC22"), Row(29000000, "99.00"), Row(27000000, "2.34"))
val schema = StructType(Array(
StructField("oldcl", IntegerType, nullable = true),
StructField("newcl", StringType, nullable = true))
)
這樣您就可以檢索預期的結果,
val df = spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
df.show()
/*
+---------+-----+
| oldcl|newcl|
+---------+-----+
|873131558|ABC22|
| 29000000|99.00|
| 27000000| 2.34|
+---------+-----+
*/
注意:我在您的代碼中找不到 newSchema 的用法,如果您遵循任何其他方法,請發表評論
val ONE = 1
var erroredRecordRow = new scala.collection.mutable.ListBuffer[Row]()
val newSchema = schema.fields.map({
case StructField(name, _: IntegerType, nullorNotnull, _) => StructField(name, StringType, nullorNotnull)
case StructField(name, _: DoubleType, nullorNotnull, _) => StructField(name, StringType, nullorNotnull)
case fields => fields
}).dropRight(ONE)
val newStructType = StructType { newSchema }
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.