1
votes

I want to process a Hive table using Spark. Below is the configuration I did, but when running my program

i got this error :

Exception in thread "main" java.lang.IllegalArgumentException: Unable to instantiate SparkSession with Hive support because Hive classes are not found.

=================================================================================

my Code :

// Entry point: queries a Hive-backed table through Spark SQL and prints the rows.
// NOTE(review): spark-hive must be on the runtime classpath (not "provided"),
// otherwise enableHiveSupport() fails with IllegalArgumentException at startup.
object spark_on_hive_table extends App {

  // Hive-enabled session pointing at the HDFS warehouse location.
  val spark = SparkSession.builder()
    .appName("Spark Hive Example")
    .config("spark.sql.warehouse.dir", "hdfs://localhost:54310/user/hive/warehouse")
    .enableHiveSupport()
    .getOrCreate()

  import spark.implicits._
  import spark.sql

  // Run the query against the Hive metastore table and dump it to stdout.
  sql("select * from pbSales").show()

}

=============================build.sbt======================================================

version := "0.1"

scalaVersion := "2.11.12"

"org.apache.spark" %% "spark-core" % sparkVersion,

"org.apache.spark" %% "spark-sql" % sparkVersion,

"org.apache.spark" %% "spark-streaming" % sparkVersion,

"org.apache.spark" %% "spark-hive" % "2.3.2" % "provided"

)

1
sparkVersion = ? - mvasyliv
sparkVersion: 2.3.2 - amine djeddi

1 Answer

2
votes
"org.apache.spark" %% "spark-hive" % "2.3.2" % "provided" 
change to 
"org.apache.spark" %% "spark-hive" % "2.3.2"