Scala exception in thread "main" java.lang.NoSuchMethodError

I am new to Scala and am using the IntelliJ IDE. I get the exception below when I run my sample code, and I am not sure whether I am missing a dependency.

Sample code:

package com.assessments.example

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

object Example extends App {


  // Create a SparkSession, using a local master so Spark runs on the local machine
  val spark = SparkSession.builder().master("local[*]").appName("ScoringModel").getOrCreate()

  // Importing spark implicits brings encoders into scope for methods such as dataframe.as[T]
  import spark.implicits._

  // Set the root logger level to WARN to reduce console noise
  Logger.getRootLogger.setLevel(Level.WARN)

  case class CustomerData(
                           customerId: String,
                           forename: String,
                           surname: String
                         )
  case class FullName(
                       firstName: String,
                       surname: String
                     )

  case class CustomerModel(
                            customerId: String,
                            forename: String,
                            surname: String,
                            fullname: FullName
                          )

  val customerData = spark.read
    .option("header", "true")
    .csv("src/main/resources/customer_data.csv")
    .as[CustomerData]

  val customerModel = customerData
    .map(
      customer =>
        CustomerModel(
          customerId = customer.customerId,
          forename = customer.forename,
          surname = customer.surname,
          fullname = FullName(
            firstName = customer.forename,
            surname = customer.surname))
    )

  customerModel.show(truncate = false)

  customerModel.write.mode("overwrite").parquet("src/main/resources/customerModel.parquet")
}
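For reference, customer_data.csv is a plain header-plus-rows file whose columns match the CustomerData case class. An illustrative sample (not my real data):

customerId,forename,surname
C001,Jane,Doe
C002,John,Smith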

Exception message:

Exception in thread "main" java.lang.NoSuchMethodError: scala.collection.mutable.Buffer$.empty()Lscala/collection/GenTraversable;
    at org.apache.spark.sql.SparkSessionExtensions.<init>(SparkSessionExtensions.scala:103)
    at org.apache.spark.sql.SparkSession$Builder.<init>(SparkSession.scala:793)
    at org.apache.spark.sql.SparkSession$.builder(SparkSession.scala:984)
    at com.assessments.example.Example$.delayedEndpoint$com$assessments$example$Example$1(Example.scala:10)
    at com.assessments.example.Example$delayedInit$body.apply(Example.scala:6)
    at scala.Function0.apply$mcV$sp(Function0.scala:39)
    at scala.Function0.apply$mcV$sp$(Function0.scala:39)
    at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:17)
    at scala.App.$anonfun$main$1(App.scala:76)
    at scala.App.$anonfun$main$1$adapted(App.scala:76)
    at scala.collection.IterableOnceOps.foreach(IterableOnce.scala:563)
    at scala.collection.IterableOnceOps.foreach$(IterableOnce.scala:561)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:926)
    at scala.App.main(App.scala:76)
    at scala.App.main$(App.scala:74)
    at com.assessments.example.Example$.main(Example.scala:6)
    at com.assessments.example.Example.main(Example.scala)

I am using Spark 3.1.2 with Scala 2.12.10. From what I have checked, Spark 3.1.2 is built against Scala 2.12, so these versions should be compatible.
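For reference, an sbt build declaring these versions would look roughly like this (a minimal sketch assuming sbt and the standard spark-sql artifact; my actual build file may differ):

// Spark 3.1.2 is published only for Scala 2.12, so scalaVersion must be a 2.12.x release
scalaVersion := "2.12.10"

// %% appends the Scala binary version, resolving to spark-sql_2.12 here
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.1.2"

One thing I notice: the stack trace mentions scala.collection.IterableOnceOps, which only exists in Scala 2.13, so perhaps a mismatched Scala standard library is ending up on my runtime classpath.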

I'd appreciate any guidance on how to resolve this. Thanks!


