Spark MLlib ALS.Rating throws a ClassCastException
Icyrockton opened this issue · comments
Reproduce with the steps below, or clone https://github.com/Icyrockton/spark_test
in build.gradle.kts
// Build script for the minimal reproduction project (Gradle Kotlin DSL).
plugins {
kotlin("jvm") version "1.6.20"
}
group = "org.example"
version = "1.0-SNAPSHOT"
repositories {
mavenCentral()
}
dependencies {
// Kotlin Spark API bridge targeting the Spark 3.2 line.
implementation("org.jetbrains.kotlinx.spark:kotlin-spark-api-3.2:1.0.3")
implementation("org.apache.spark:spark-sql_2.12:3.2.0")
implementation("org.apache.spark:spark-streaming_2.12:3.2.0")
// NOTE(review): version 2.4.8 does not match the 3.2.0 Spark artifacts above —
// presumably unrelated to this bug, but confirm it is intentional.
implementation("org.apache.spark:spark-streaming-kafka-0-10_2.12:2.4.8")
// Provides org.apache.spark.ml.recommendation.ALS used in the reproduction.
implementation("org.apache.spark:spark-mllib_2.12:3.2.0")
}
import org.apache.spark.ml.recommendation.ALS
import org.jetbrains.kotlinx.spark.api.c
import org.jetbrains.kotlinx.spark.api.map
import org.jetbrains.kotlinx.spark.api.withSpark
// Minimal reproduction: mapping a Dataset of Kotlin tuples to Scala's
// ALS.Rating case class triggers the ClassCastException shown below.
fun main() {
withSpark {
// c(...) builds a tuple; _1/_2/_3 are its Product accessors.
dsOf(c(1,0,0.4f),c(2,2,0.5f)).map {
// Encoding ALS.Rating (a Scala case class, i.e. a Product) is where
// the reported KTypeImpl -> TypeReference cast failure occurs.
ALS.Rating(it._1,it._2,it._3)
}.show()
}
}
Error info:
Exception in thread "main" java.lang.ClassCastException: kotlin.reflect.jvm.internal.KTypeImpl cannot be cast to kotlin.jvm.internal.TypeReference
at kotlin.jvm.internal.ReflectionFactory.platformType(ReflectionFactory.java:99)
at kotlin.jvm.internal.Reflection.platformType(Reflection.java:191)
at TestKt.main(Test.kt:55)
at TestKt.main(Test.kt)
Thanks for letting us know! It seems I didn't realize that Scala case classes are also considered Products, and thus have property names different from _1, _2, etc. A fix is on the way :)
Merged and will be in next update :)