Error - Failed to specify server's Kerberos principal name
I am trying to set up a Hadoop cluster with Kerberos. I had the cluster working with Spark and YARN before starting the Kerberos configuration. Currently my master and three nodes are running, but I'm getting an error in the YARN logs.
Error:
java.io.IOException: Failed on local exception: java.io.IOException: java.lang.IllegalArgumentException: Failed to specify server's Kerberos principal name
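For reference, a quick way to sanity-check a keytab and obtain a ticket on a node looks roughly like this (keytab path and realm taken from the configs below; the exact principal name is an assumption, adjust to whatever klist shows):

# list the principals stored in the keytab (path from the configs below)
klist -kt /etc/security/keytabs/yarn.service.keytab
# obtain a ticket as the YARN service principal (principal name assumed here)
kinit -kt /etc/security/keytabs/yarn.service.keytab yarn/$(hostname -f)@EXAMPLEREALM.COM
klist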
core-site.xml
<configuration>
  <property>
    <name>fs.default.name</name>
    <value>hdfs://hadoopmaster:9000</value>
  </property>
  <!-- Kerberos configuration -->
  <property>
    <name>hadoop.security.authentication</name>
    <value>kerberos</value>
  </property>
  <property>
    <name>hadoop.security.authorization</name>
    <value>true</value>
  </property>
  <property>
    <name>hadoop.security.auth_to_local</name>
    <value>
      RULE:[2:$1@$0](hdfs/.*@.*EXAMPLEREALM.COM)s/.*/hdfs/
      RULE:[2:$1@$0](HTTP/.*@.*EXAMPLEREALM.COM)s/.*/hdfs/
      RULE:[2:$1@$0](yarn/.*@.*EXAMPLEREALM.COM)s/.*/yarn/
      DEFAULT
    </value>
  </property>
</configuration>
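The auth_to_local rules above can be sanity-checked with Hadoop's built-in HadoopKerberosName class, which prints the short name a given principal maps to (the principal below is just an example):

# should print a mapping to "yarn" if the third rule matches
hadoop org.apache.hadoop.security.HadoopKerberosName yarn/[email protected]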
hdfs-site.xml
<configuration>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>/home/hadoop/data/namenode</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop/data/datanode</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>2</value>
  </property>
  <!-- General HDFS security config -->
  <property>
    <name>dfs.block.access.token.enable</name>
    <value>true</value>
  </property>
  <!-- NameNode security config -->
  <property>
    <name>dfs.namenode.keytab.file</name>
    <value>/etc/security/keytabs/hdfs.service.keytab</value> <!-- path to the HDFS keytab -->
  </property>
  <property>
    <name>dfs.namenode.kerberos.principal</name>
    <value>hdfs/[email protected]</value>
  </property>
  <property>
    <name>dfs.namenode.kerberos.internal.spnego.principal</name>
    <value>HTTP/[email protected]</value>
  </property>
  <!-- Secondary NameNode security config -->
  <property>
    <name>dfs.secondary.namenode.keytab.file</name>
    <value>/etc/security/keytabs/hdfs.service.keytab</value> <!-- path to the HDFS keytab -->
  </property>
  <property>
    <name>dfs.secondary.namenode.kerberos.principal</name>
    <value>hdfs/[email protected]</value>
  </property>
  <property>
    <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
    <value>HTTP/[email protected]</value>
  </property>
  <!-- DataNode security config -->
  <property>
    <name>dfs.datanode.data.dir.perm</name>
    <value>700</value>
  </property>
  <property>
    <name>dfs.datanode.address</name>
    <value>0.0.0.0:1004</value>
  </property>
  <property>
    <name>dfs.datanode.http.address</name>
    <value>0.0.0.0:1006</value>
  </property>
  <property>
    <name>dfs.datanode.keytab.file</name>
    <value>/etc/security/keytabs/hdfs.service.keytab</value> <!-- path to the HDFS keytab -->
  </property>
  <property>
    <name>dfs.datanode.kerberos.principal</name>
    <value>hdfs/[email protected]</value>
  </property>
  <!-- Web Authentication config -->
  <property>
    <name>dfs.web.authentication.kerberos.principal</name>
    <value>HTTP/[email protected]</value>
  </property>
</configuration>
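Since dfs.datanode.address and dfs.datanode.http.address above use privileged ports (1004/1006), my understanding is that the DataNodes have to be started as root through jsvc; the corresponding hadoop-env.sh lines would look roughly like this (Hadoop 2.x variable names, 3.x uses HDFS_DATANODE_SECURE_USER instead, and the jsvc path is a guess):

# hadoop-env.sh (sketch)
export HADOOP_SECURE_DN_USER=hdfs   # unprivileged user the DataNode drops to after binding the ports
export JSVC_HOME=/usr/bin           # location of the jsvc binary (assumed path)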
yarn-site.xml
<configuration>
  <property>
    <name>yarn.acl.enable</name>
    <value>0</value>
  </property>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>hadoopmaster</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
    <value>org.apache.hadoop.mapred.ShuffleHandler</value>
  </property>
  <property>
    <name>yarn.nodemanager.principal</name>
    <value>yarn/[email protected]</value>
  </property>
  <property>
    <name>yarn.nodemanager.keytab</name>
    <value>/etc/security/keytabs/yarn.service.keytab</value>
  </property>
</configuration>
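I am not sure whether the ResourceManager principal also has to be present in the yarn-site.xml that the worker nodes read; if so, I assume the entries would look something like this (same keytab path as above, realm taken from the auth_to_local rules, _HOST being Hadoop's placeholder for the local hostname):

<!-- assumed ResourceManager entries, not currently in my config -->
<property>
  <name>yarn.resourcemanager.principal</name>
  <value>yarn/[email protected]</value>
</property>
<property>
  <name>yarn.resourcemanager.keytab</name>
  <value>/etc/security/keytabs/yarn.service.keytab</value>
</property>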