
Here is my code from IntelliJ:

package com.dmngaya

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

object ReadVertexPage {

  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("ReadVertexPage").setMaster("local")
    val sc: SparkContext = new SparkContext(conf)
    val spark = SparkSession
      .builder()
      .appName("Spark SQL basic example")
      .getOrCreate()

    val jdbcDF1 = spark.read.format("jdbc").options(
      Map(
        "driver" -> "com.tigergraph.jdbc.Driver",
        "url" -> "jdbc:tg:http://127.0.0.1:14240",
        "username" -> "tigergraph",
        "password" -> "tigergraph",
        "graph" -> "gsql_demo", // graph name
        "dbtable" -> "vertex Page", // vertex type
        "limit" -> "10", // number of vertices to retrieve
        "debug" -> "0")).load()

    jdbcDF1.show()
  }
}

When I run it in spark-shell, it runs fine:

/opt/spark/bin/spark-shell --jars /home/tigergraph/ecosys/tools/etl/tg-jdbc-driver/tg-jdbc-driver/target/tg-jdbc-driver-1.2.jar

scala> val jdbcDF1 = spark.read.format("jdbc").options(
 |   Map(
 |     "driver" -> "com.tigergraph.jdbc.Driver",
 |     "url" -> "jdbc:tg:http://127.0.0.1:14240",
 |     "username" -> "tigergraph",
 |     "password" -> "tigergraph",
 |     "graph" -> "gsql_demo", // graph name
 |     "dbtable" -> "vertex Page", // vertex type
 |     "limit" -> "10", // number of vertices to retrieve
 |     "debug" -> "0")).load()
 jdbcDF1: org.apache.spark.sql.DataFrame = [v_id: string, page_id: string]

 scala> jdbcDF1.show
 result:
+----+--------+
|v_id| page_id|
+----+--------+
|   7|       7|
|   5|       5|
|  10|      10|
|1002|    1002|
|   3|       3|
|1000|new page|
|1003|    1003|
|   1|       1|
|   6|       6|
|1001|        |
+----+--------+

From IntelliJ, I have the following error:

20/11/23 10:43:43 INFO SharedState: Setting hive.metastore.warehouse.dir ('null') to the value of spark.sql.warehouse.dir ('file:/home/tigergraph/fiverr-2/spark-warehouse').
20/11/23 10:43:43 INFO SharedState: Warehouse path is 'file:/home/tigergraph/fiverr-2/spark-warehouse'.
Exception in thread "main" java.lang.ClassNotFoundException: Failed to find data source: jdbc. Please find packages at http://spark.apache.org/third-party-projects.html
    at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:679)
    at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSourceV2(DataSource.scala:733)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:248)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:221)
    at com.dmngaya.ReadVertexPage$.main(ReadVertexPage.scala:25)
    at com.dmngaya.ReadVertexPage.main(ReadVertexPage.scala)
Caused by: java.lang.ClassNotFoundException: jdbc.DefaultSource
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$lookupDataSource$5(DataSource.scala:653)
    at scala.util.Try$.apply(Try.scala:213)
    at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$lookupDataSource$4(DataSource.scala:653)
    at scala.util.Failure.orElse(Try.scala:224)
    at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:653)
    ... 5 more
20/11/23 10:43:46 INFO SparkContext: Invoking stop() from shutdown hook
20/11/23 10:43:46 INFO SparkUI: Stopped Spark web UI at http://tigergraph-01:4040
20/11/23 10:43:46 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
20/11/23 10:43:46 INFO MemoryStore: MemoryStore cleared
20/11/23 10:43:46 INFO BlockManager: BlockManager stopped
20/11/23 10:43:47 INFO BlockManagerMaster: BlockManagerMaster stopped
20/11/23 10:43:47 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
20/11/23 10:43:47 INFO SparkContext: Successfully stopped SparkContext
20/11/23 10:43:47 INFO ShutdownHookManager: Shutdown hook called
20/11/23 10:43:47 INFO ShutdownHookManager: Deleting directory /tmp/spark-66dd4dc4-c70b-4836-805b-d68b3183ccbf

Process finished with exit code 1

How can I fix that?


1 Answer


You should add tg-jdbc-driver-1.2 as a dependency in your pom.xml/build.sbt. Your spark-shell run works because --jars puts the driver jar on the classpath at launch; when IntelliJ runs the class directly, nothing supplies that jar, so Spark cannot load the TigerGraph data source and fails with the ClassNotFoundException you see.
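For sbt, a minimal sketch of the build.sbt change. The coordinates com.tigergraph : tg-jdbc-driver : 1.2 are an assumption inferred from the jar name in your question, so verify them against the repository you use:

// build.sbt
// Assumed artifact coordinates, inferred from tg-jdbc-driver-1.2.jar; verify before use.
libraryDependencies += "com.tigergraph" % "tg-jdbc-driver" % "1.2"

If the artifact is not published in a repository you can reach, copy the jar you already pass to spark-shell into the project's lib/ directory (sbt's default location for unmanaged jars), or declare the equivalent <dependency> in pom.xml for Maven. Either way, reimport the project in IntelliJ afterwards so the driver ends up on the run classpath.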