Tags: java, apache-spark

symbolic reference class is not accessible: class sun.util.calendar.ZoneInfo, from interface org.apache.spark.sql.catalyst.util.SparkDateTimeUtils


While trying to write a Spark (v4.0.0-preview1) DataFrame to a SQL Server table with the JDBC driver, I am getting the following error (a minimal sketch of the failing write follows the stack trace):

java.lang.IllegalAccessException: symbolic reference class is not accessible: class sun.util.calendar.ZoneInfo, from interface org.apache.spark.sql.catalyst.util.SparkDateTimeUtils (unnamed module @7bbc8656)
    at java.base/java.lang.invoke.MemberName.makeAccessException(MemberName.java:955) ~[?:?]
    at java.base/java.lang.invoke.MethodHandles$Lookup.checkSymbolicClass(MethodHandles.java:3686) ~[?:?]
    at java.base/java.lang.invoke.MethodHandles$Lookup.resolveOrFail(MethodHandles.java:3646) ~[?:?]
    at java.base/java.lang.invoke.MethodHandles$Lookup.findVirtual(MethodHandles.java:2680) ~[?:?]
    at org.apache.spark.sql.catalyst.util.SparkDateTimeUtils.org$apache$spark$sql$catalyst$util$SparkDateTimeUtils$$getOffsetsByWallHandle(SparkDateTimeUtils.scala:206) ~[spark-sql-api_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.catalyst.util.SparkDateTimeUtils.org$apache$spark$sql$catalyst$util$SparkDateTimeUtils$$getOffsetsByWallHandle$(SparkDateTimeUtils.scala:201) ~[spark-sql-api_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.catalyst.util.DateTimeUtils$.org$apache$spark$sql$catalyst$util$SparkDateTimeUtils$$getOffsetsByWallHandle$lzycompute(DateTimeUtils.scala:41) ~[spark-catalyst_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.catalyst.util.DateTimeUtils$.org$apache$spark$sql$catalyst$util$SparkDateTimeUtils$$getOffsetsByWallHandle(DateTimeUtils.scala:41) ~[spark-catalyst_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.catalyst.util.SparkDateTimeUtils.toJavaDate(SparkDateTimeUtils.scala:228) ~[spark-sql-api_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.catalyst.util.SparkDateTimeUtils.toJavaDate$(SparkDateTimeUtils.scala:223) ~[spark-sql-api_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.catalyst.util.DateTimeUtils$.toJavaDate(DateTimeUtils.scala:41) ~[spark-catalyst_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.catalyst.util.DateTimeUtils.toJavaDate(DateTimeUtils.scala) ~[spark-catalyst_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificSafeProjection.createExternalRow_0_2$(Unknown Source) ~[?:?]
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificSafeProjection.apply(Unknown Source) ~[?:?]
    at scala.collection.Iterator$$anon$9.next(Iterator.scala:584) ~[scala-library-2.13.14.jar:?]
    at scala.collection.Iterator$$anon$9.next(Iterator.scala:584) ~[scala-library-2.13.14.jar:?]
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.savePartition(JdbcUtils.scala:806) ~[spark-sql_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.$anonfun$saveTable$1(JdbcUtils.scala:978) ~[spark-sql_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.$anonfun$saveTable$1$adapted(JdbcUtils.scala:977) ~[spark-sql_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.rdd.RDD.$anonfun$foreachPartition$2(RDD.scala:1042) ~[spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.rdd.RDD.$anonfun$foreachPartition$2$adapted(RDD.scala:1042) ~[spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2501) ~[spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93) ~[spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:171) ~[spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.scheduler.Task.run(Task.scala:146) ~[spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$5(Executor.scala:640) ~[spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:99) ~[spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:643) [spark-core_2.13-4.0.0-preview1.jar:4.0.0-preview1]
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
    at java.base/java.lang.Thread.run(Thread.java:840) [?:?]
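
For context, here is roughly the kind of write that triggers the failure; a minimal sketch, assuming a local session, with the JDBC URL, table name, and credentials as placeholders (the SQL Server driver, mssql-jdbc, must be on the classpath):

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class JdbcWriteRepro {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder()
                    .appName("jdbc-write-repro")
                    .master("local[*]")
                    .getOrCreate();

            // A DATE column is enough to hit the failure: converting it to
            // java.sql.Date goes through SparkDateTimeUtils.toJavaDate, which
            // (per the stack trace above) performs a MethodHandles lookup on
            // sun.util.calendar.ZoneInfo.
            Dataset<Row> df = spark.sql("SELECT 1 AS id, current_date() AS dt");

            df.write()
              .format("jdbc")
              .option("url", "jdbc:sqlserver://localhost:1433;databaseName=testdb") // placeholder
              .option("dbtable", "dbo.target_table")                                // placeholder
              .option("user", "sa")                                                 // placeholder
              .option("password", "<password>")                                     // placeholder
              .mode("append")
              .save();
        }
    }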

Below is an excerpt from my pom.xml, updated as per Chris's suggestion. Hope this helps!


    <properties>
        <maven.compiler.source>17</maven.compiler.source>
        <maven.compiler.target>17</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <spring-boot.repackage.skip>true</spring-boot.repackage.skip>
    
        <!-- SPARK-36796 for JDK-17 test-->
        <extraJavaTestArgs>
            -XX:+IgnoreUnrecognizedVMOptions
            --add-modules=jdk.incubator.vector
            --add-opens=java.base/java.lang=ALL-UNNAMED
            --add-opens=java.base/java.lang.invoke=ALL-UNNAMED
            --add-opens=java.base/java.lang.reflect=ALL-UNNAMED
            --add-opens=java.base/java.io=ALL-UNNAMED
            --add-opens=java.base/java.net=ALL-UNNAMED
            --add-opens=java.base/java.nio=ALL-UNNAMED
            --add-opens=java.base/java.util=ALL-UNNAMED
            --add-opens=java.base/java.util.concurrent=ALL-UNNAMED
            --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
            --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED
            --add-opens=java.base/sun.nio.ch=ALL-UNNAMED
            --add-opens=java.base/sun.nio.cs=ALL-UNNAMED
            --add-opens=java.base/sun.security.action=ALL-UNNAMED
            --add-opens=java.base/sun.util.calendar=ALL-UNNAMED
            -Djdk.reflect.useDirectMethodHandle=false
            -Dio.netty.tryReflectionSetAccessible=true
        </extraJavaTestArgs>
    </properties>
    
    <dependencies>
        <!-- all other dependencies -->
    </dependencies>
    
    <plugins>
        <plugin>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-maven-plugin</artifactId>
            <configuration>
                <jvmArguments>
                    --add-opens=java.base/java.lang=ALL-UNNAMED
                    --add-opens=java.base/java.lang.invoke=ALL-UNNAMED
                    --add-opens=java.base/java.lang.reflect=ALL-UNNAMED
                    --add-opens=java.base/java.io=ALL-UNNAMED
                    --add-opens=java.base/java.net=ALL-UNNAMED
                    --add-opens=java.base/java.nio=ALL-UNNAMED
                    --add-opens=java.base/java.util=ALL-UNNAMED
                    --add-opens=java.base/java.util.concurrent=ALL-UNNAMED
                    --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
                    --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED
                    --add-opens=java.base/sun.nio.ch=ALL-UNNAMED
                    --add-opens=java.base/sun.nio.cs=ALL-UNNAMED
                    --add-opens=java.base/sun.security.action=ALL-UNNAMED
                    --add-opens=java.base/sun.util.calendar=ALL-UNNAMED
                </jvmArguments>
            </configuration>
        </plugin>
    </plugins>
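
One caveat with the excerpt above: extraJavaTestArgs is just a Maven property and does nothing on its own. Spark's own build passes it to the Surefire argLine, and the spring-boot-maven-plugin's jvmArguments only take effect for mvn spring-boot:run, not for a repackaged jar started with java -jar. A minimal sketch of wiring the property into the forked test JVM:

    <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <configuration>
            <!-- hand the module options defined in <properties> to the forked test JVM -->
            <argLine>${extraJavaTestArgs}</argLine>
        </configuration>
    </plugin>

For the packaged application itself, the same options have to be on the launching java command line (or in JDK_JAVA_OPTIONS) at startup.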

I also tried the above options with --add-exports, but that didn't resolve the issue either.

This seems to have been fixed in Spark by the pull request for SPARK-48185, but I am still getting the error.

Can someone please suggest a solution for this issue?


Solution

  • Please follow the instructions here: https://stackoverflow.com/a/78300174/1028537

    In short, to run Spark on JDK 17 you need to launch the JVM with the module options that Spark itself uses, which aren't well documented: JavaModuleOptions.

    You can find the same list in the build files as well: pom.xml test args.

    Spark Connect features it too: spark-connect-scala-client:

    -XX:+IgnoreUnrecognizedVMOptions \
      --add-opens=java.base/java.lang=ALL-UNNAMED \
      --add-opens=java.base/java.lang.invoke=ALL-UNNAMED \
      --add-opens=java.base/java.lang.reflect=ALL-UNNAMED \
      --add-opens=java.base/java.io=ALL-UNNAMED \
      --add-opens=java.base/java.net=ALL-UNNAMED \
      --add-opens=java.base/java.nio=ALL-UNNAMED \
      --add-opens=java.base/java.util=ALL-UNNAMED \
      --add-opens=java.base/java.util.concurrent=ALL-UNNAMED \
      --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED \
      --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED \
      --add-opens=java.base/sun.nio.ch=ALL-UNNAMED \
      --add-opens=java.base/sun.nio.cs=ALL-UNNAMED \
      --add-opens=java.base/sun.security.action=ALL-UNNAMED \
      --add-opens=java.base/sun.util.calendar=ALL-UNNAMED \
      --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED \
      -Djdk.reflect.useDirectMethodHandle=false \
      -Dio.netty.tryReflectionSetAccessible=true
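
    Note that these options must reach the JVM that actually runs the Spark driver (and executors): with spark-submit they can be passed via --driver-java-options and spark.executor.extraJavaOptions, and for a plain java -jar launch a JDK 9+ argument file keeps the command manageable. A sketch, assuming the options above are saved to a file named spark-jdk17.opts (both the file and jar names are placeholders):

    java @spark-jdk17.opts -jar target/my-app.jar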