I'm new to Big Data/Hadoop. I'm trying to connect to Hive from Eclipse over JDBC, but I keep getting the error below and can't figure out what's going wrong. Please help.
ERROR StatusLogger No log4j2 configuration file found. Using default configuration: logging only errors to the console.
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/C:/Users/eragank/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.4.1/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/C:/Users/eragank/.m2/repository/org/slf4j/slf4j-log4j12/1.6.1/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
Exception in thread "main" java.io.IOException: Login failure for hive/[email protected] from keytab /etc/security/keytabs/hive.service.keytab: javax.security.auth.login.LoginException: Unable to obtain password from user
    at org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab(UserGroupInformation.java:935)
    at com.ihg.test.Test.main(Test.java:32)
Caused by: javax.security.auth.login.LoginException: Unable to obtain password from user
    at com.sun.security.auth.module.Krb5LoginModule.promptForPass(Krb5LoginModule.java:897)
    at com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Krb5LoginModule.java:760)
    at com.sun.security.auth.module.Krb5LoginModule.login(Krb5LoginModule.java:617)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at javax.security.auth.login.LoginContext.invoke(LoginContext.java:755)
    at javax.security.auth.login.LoginContext.access$000(LoginContext.java:195)
    at javax.security.auth.login.LoginContext$4.run(LoginContext.java:682)
    at javax.security.auth.login.LoginContext$4.run(LoginContext.java:680)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.login.LoginContext.invokePriv(LoginContext.java:680)
    at javax.security.auth.login.LoginContext.login(LoginContext.java:587)
    at org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab(UserGroupInformation.java:926)
    ... 1 more
Test.java
package com.test.Test;
import java.sql.SQLException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.jdbc.HiveDriver;
import java.sql.DriverManager;
public class Test {

    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

    /**
     * @param args
     * @throws SQLException
     * @throws IOException
     */
    public static void main(String[] args) throws SQLException, IOException {
        try {
            Class.forName(driverName);
            System.out.println("Class FOUND!!");
        } catch (ClassNotFoundException e) {
            // The Hive JDBC driver is not on the classpath, so there is no point continuing.
            e.printStackTrace();
            System.exit(1);
        }

        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("hive/[email protected]",
                "/etc/security/keytabs/hive.service.keytab");

        // replace "hive" here with the name of the user the queries should run as
        Connection con = DriverManager.getConnection(
                "jdbc:hive2://mksd1dlllytj001.c.iron-potion-771.internal:10000/grs_core?transportMode=http;httpPath=cliservice;auth=kerberos;sasl.qop=auth-int;principal=hive/[email protected]",
                "hive", "");

        // Other connection variants that were tried:
        // Connection con = DriverManager.getConnection("jdbc:hive2://mksd1dlllytj001.c.iron-potion-771.internal:10000/;principal=hive/[email protected];transportMode=http;httpPath=cliservice");
        // HiveDriver hd = new HiveDriver();
        // System.out.println("HDB CON..");
        // Connection con2 = hd.connect("jdbc:hive2://mksd1dlllytj001.c.iron-potion-771.internal:10000/grs_core?transportMode=http;httpPath=cliservice;auth=kerberos;sasl.qop=auth-int");
        // System.out.println("HDB CONN=" + con2);
        // ;principal=hive/[email protected]

        Statement stmt = con.createStatement();

        /* String tableName = "testHiveDriverTable";
        stmt.execute("drop table if exists " + tableName);
        stmt.execute("create table " + tableName + " (key int, value string)");
        // show tables
        // String sql = "show tables '" + tableName + "'"; */

        // Note: no trailing ';' inside the SQL string passed through JDBC.
        String sql = "show tables";
        ResultSet res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getString(1));
        }

        res.close();
        stmt.close();
        con.close();
    }
}
POM.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.ihg.ptfwork</groupId>
    <artifactId>ptfUpdateTest</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>ptfUpdateTest</name>
    <url>http://maven.apache.org</url>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <env.hiveVersion>1.2.1000.2.6.0.3-8</env.hiveVersion>
        <env.hivecoreDir>/usr/hdp/current/hive-client/conf</env.hivecoreDir>
        <env.hivecoreFile>hive-site.xml</env.hivecoreFile>
    </properties>

    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>3.8.1</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.0.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-beeline</artifactId>
            <version>2.0.0</version>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.7.0_05</version>
            <scope>system</scope>
            <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
    </dependencies>
</project>
klist -e -k xxxx.keytab – Samson Scharfrichter
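That klist check is the right first step: "LoginException: Unable to obtain password from user" during loginUserFromKeytab generally means the JVM could not get a usable key out of the keytab. Typical reasons are that the file does not exist or is not readable on the machine running Eclipse (note that /etc/security/keytabs/hive.service.keytab is a path on the Hadoop node, while this program runs on Windows), that the principal string does not exactly match an entry in the keytab, or that the keys use an encryption type the JRE cannot handle (e.g. AES-256 on an older JDK 8 without the unlimited-strength JCE policy files). As an illustrative sketch only (the principal and path below are placeholders, not values from the question), the same check can be done from Java before calling loginUserFromKeytab:

import java.io.File;

import javax.security.auth.kerberos.KerberosKey;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.kerberos.KeyTab;

public class KeytabCheck {

    public static void main(String[] args) {
        // Placeholders -- use the exact principal and path that are passed to
        // UserGroupInformation.loginUserFromKeytab() in Test.java.
        String principalName = "hive/some-host@SOME.REALM";
        String keytabPath = "/etc/security/keytabs/hive.service.keytab";

        File keytabFile = new File(keytabPath);
        if (!keytabFile.canRead()) {
            // Common failure on a Windows client: the keytab only exists on the cluster node.
            System.err.println("Keytab not readable on this machine: " + keytabFile.getAbsolutePath());
            return;
        }

        KerberosPrincipal principal = new KerberosPrincipal(principalName);
        KerberosKey[] keys = KeyTab.getInstance(keytabFile).getKeys(principal);
        if (keys.length == 0) {
            // The principal string must match a keytab entry exactly (service/host@REALM).
            System.err.println("No keys found in the keytab for principal " + principalName);
            return;
        }
        for (KerberosKey key : keys) {
            // Compare these etype numbers with the klist -e -k output and with what the JRE supports.
            System.out.println("Found key: etype=" + key.getKeyType() + ", kvno=" + key.getVersionNumber());
        }
    }
}

If this prints keys but the login still fails, the next things to compare are the realm and host in the principal against what klist reports for the keytab entries.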