In my shell I run the following command, but I get the error output shown below.
24king@24king-PC /usr/local/hadoop
$ sh -x bin/hdfs namenode -format
++ which bin/hdfs + bin=/usr/local/hadoop/bin/hdfs ++ dirname /usr/local/hadoop/bin/hdfs + bin=/usr/local/hadoop/bin ++ cd /usr/local/hadoop/bin ++ pwd + bin=/usr/local/hadoop/bin + DEFAULT_LIBEXEC_DIR=/usr/local/hadoop/bin/../libexec + HADOOP_LIBEXEC_DIR=/usr/local/hadoop/bin/../libexec + . /usr/local/hadoop/bin/../libexec/hdfs-config.sh +++ which bin/hdfs ++ bin=/usr/local/hadoop/bin/hdfs +++ dirname /usr/local/hadoop/bin/hdfs ++ bin=/usr/local/hadoop/bin +++ cd /usr/local/hadoop/bin +++ pwd ++ bin=/usr/local/hadoop/bin ++ DEFAULT_LIBEXEC_DIR=/usr/local/hadoop/bin/../libexec ++ HADOOP_LIBEXEC_DIR=/usr/local/hadoop/bin/../libexec ++ '[' -e /usr/local/hadoop/bin/../libexec/hadoop-config.sh ']' ++ . /usr/local/hadoop/bin/../libexec/hadoop-config.sh +++ this=/usr/local/hadoop/bin/../libexec/hadoop-config.sh +++++ dirname -- /usr/local/hadoop/bin/../libexec/hadoop-config.sh ++++ cd -P -- /usr/local/hadoop/bin/../libexec ++++ pwd -P +++ common_bin=/usr/local/hadoop/libexec ++++ basename -- /usr/local/hadoop/bin/../libexec/hadoop-config.sh +++ script=hadoop-config.sh +++ this=/usr/local/hadoop/libexec/hadoop-config.sh +++ '[' -f /usr/local/hadoop/libexec/hadoop-layout.sh ']' +++ HADOOP_COMMON_DIR=share/hadoop/common +++ HADOOP_COMMON_LIB_JARS_DIR=share/hadoop/common/lib +++ HADOOP_COMMON_LIB_NATIVE_DIR=lib/native +++ HDFS_DIR=share/hadoop/hdfs +++ HDFS_LIB_JARS_DIR=share/hadoop/hdfs/lib +++ YARN_DIR=share/hadoop/yarn +++ YARN_LIB_JARS_DIR=share/hadoop/yarn/lib +++ MAPRED_DIR=share/hadoop/mapreduce +++ MAPRED_LIB_JARS_DIR=share/hadoop/mapreduce/lib ++++ cd -P -- /usr/local/hadoop/libexec/.. 
++++ pwd -P +++ HADOOP_DEFAULT_PREFIX=/usr/local/hadoop +++ HADOOP_PREFIX=/usr/local/hadoop +++ export HADOOP_PREFIX +++ '[' 2 -gt 1 ']' +++ '[' --config = namenode ']' +++ '[' -e /usr/local/hadoop/conf/hadoop-env.sh ']' +++ DEFAULT_CONF_DIR=etc/hadoop +++ export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop +++ HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop +++ [[ '' != '' ]] +++ '[' 2 -gt 1 ']' +++ '[' --hosts = namenode ']' +++ '[' --hostnames = namenode ']' +++ [[ '' != '' ]] +++ '[' -f /usr/local/hadoop/etc/hadoop/hadoop-env.sh ']' +++ . /usr/local/hadoop/etc/hadoop/hadoop-env.sh ++++ export JAVA_HOME=/usr/local/jdk1.6.0_45 ++++ JAVA_HOME=/usr/local/jdk1.6.0_45 ++++ export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop ++++ HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop ++++ for f in '$HADOOP_HOME/contrib/capacity-scheduler/.jar' ++++ '[' '' ']' ++++ export 'HADOOP_CLASSPATH=/contrib/capacity-scheduler/.jar' ++++ HADOOP_CLASSPATH='/contrib/capacity-scheduler/.jar' ++++ export 'HADOOP_OPTS= -Djava.net.preferIPv4Stack=true' ++++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true' ++++ export 'HADOOP_NAMENODE_OPTS=-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender ' ++++ HADOOP_NAMENODE_OPTS='-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender ' ++++ export 'HADOOP_DATANODE_OPTS=-Dhadoop.security.logger=ERROR,RFAS ' ++++ HADOOP_DATANODE_OPTS='-Dhadoop.security.logger=ERROR,RFAS ' ++++ export 'HADOOP_SECONDARYNAMENODE_OPTS=-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender ' ++++ HADOOP_SECONDARYNAMENODE_OPTS='-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender ' ++++ export HADOOP_NFS3_OPTS= ++++ HADOOP_NFS3_OPTS= ++++ export 'HADOOP_PORTMAP_OPTS=-Xmx512m ' ++++ HADOOP_PORTMAP_OPTS='-Xmx512m ' ++++ export 'HADOOP_CLIENT_OPTS=-Xmx512m ' ++++ HADOOP_CLIENT_OPTS='-Xmx512m ' ++++ export HADOOP_SECURE_DN_USER= ++++ HADOOP_SECURE_DN_USER= ++++ export HADOOP_SECURE_DN_LOG_DIR=/ ++++ 
HADOOP_SECURE_DN_LOG_DIR=/ ++++ export HADOOP_PID_DIR= ++++ HADOOP_PID_DIR= ++++ export HADOOP_SECURE_DN_PID_DIR= ++++ HADOOP_SECURE_DN_PID_DIR= ++++ export HADOOP_IDENT_STRING=24king ++++ HADOOP_IDENT_STRING=24king ++++ export HADOOP_PREFIX=/usr/local/hadoop ++++ HADOOP_PREFIX=/usr/local/hadoop ++++ export HADOOP_COMMON_HOME=/usr/local/hadoop ++++ HADOOP_COMMON_HOME=/usr/local/hadoop ++++ /sbin/sysctl -n net.ipv6.bindv6only +++ bindv6only= +++ '[' -n '' ']' +++ export MALLOC_ARENA_MAX=4 +++ MALLOC_ARENA_MAX=4 +++ [[ -z /usr/local/jdk1.6.0_45 ]] +++ JAVA=/usr/local/jdk1.6.0_45/bin/java +++ JAVA_HEAP_MAX=-Xmx1000m +++ '[' '' '!=' '' ']' +++ CLASSPATH=/usr/local/hadoop/etc/hadoop +++ IFS= +++ '[' /usr/local/hadoop = '' ']' +++ '[' -d /usr/local/hadoop/share/hadoop/common/webapps ']' +++ '[' -d /usr/local/hadoop/share/hadoop/common/lib ']' +++ CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/' +++ CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/' +++ '[' '' = '' ']' +++ HADOOP_LOG_DIR=/usr/local/hadoop/logs +++ '[' '' = '' ']' +++ HADOOP_LOGFILE=hadoop.log +++ '[' '' = '' ']' +++ HADOOP_POLICYFILE=hadoop-policy.xml +++ unset IFS +++ '[' -d /usr/local/hadoop/build/native -o -d /usr/local/hadoop/lib/native ']' +++ '[' -d /usr/local/hadoop/lib/native ']' +++ '[' x '!=' x ']' +++ JAVA_LIBRARY_PATH=/usr/local/hadoop/lib/native +++ TOOL_PATH='/usr/local/hadoop/share/hadoop/tools/lib/' +++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs' +++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log' +++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop' +++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log 
-Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king' +++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console' +++ '[' x/usr/local/hadoop/lib/native '!=' x ']' +++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native' +++ export LD_LIBRARY_PATH=:/usr/local/hadoop/lib/native +++ LD_LIBRARY_PATH=:/usr/local/hadoop/lib/native +++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native -Dhadoop.policy.file=hadoop-policy.xml' +++ HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true' +++ '[' '' = '' ']' +++ '[' -d /usr/local/hadoop/share/hadoop/hdfs ']' +++ export HADOOP_HDFS_HOME=/usr/local/hadoop +++ HADOOP_HDFS_HOME=/usr/local/hadoop +++ '[' -d /usr/local/hadoop/share/hadoop/hdfs/webapps ']' +++ CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs' +++ '[' -d /usr/local/hadoop/share/hadoop/hdfs/lib ']' +++ CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/' +++ 
CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/' +++ '[' '' = '' ']' +++ '[' -d /usr/local/hadoop/share/hadoop/yarn ']' +++ export HADOOP_YARN_HOME=/usr/local/hadoop +++ HADOOP_YARN_HOME=/usr/local/hadoop +++ '[' -d /usr/local/hadoop/share/hadoop/yarn/webapps ']' +++ '[' -d /usr/local/hadoop/share/hadoop/yarn/lib ']' +++ CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/' +++ CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/' +++ '[' '' = '' ']' +++ '[' -d /usr/local/hadoop/share/hadoop/mapreduce ']' +++ export HADOOP_MAPRED_HOME=/usr/local/hadoop +++ HADOOP_MAPRED_HOME=/usr/local/hadoop +++ '[' /usr/local/hadoop/share/hadoop/mapreduce '!=' /usr/local/hadoop/share/hadoop/yarn ']' +++ '[' -d /usr/local/hadoop/share/hadoop/mapreduce/webapps ']' +++ '[' -d /usr/local/hadoop/share/hadoop/mapreduce/lib ']' +++ CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/:/usr/local/hadoop/share/hadoop/mapreduce/lib/' +++ 
CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/:/usr/local/hadoop/share/hadoop/mapreduce/lib/:/usr/local/hadoop/share/hadoop/mapreduce/' +++ '[' '/contrib/capacity-scheduler/.jar' '!=' '' ']' +++ '[' '' '!=' '' ']' +++ CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/:/usr/local/hadoop/share/hadoop/mapreduce/lib/:/usr/local/hadoop/share/hadoop/mapreduce/:/contrib/capacity-scheduler/.jar' + '[' 2 = 0 ']' + COMMAND=namenode + shift + case $COMMAND in + '[' namenode == datanode ']' + '[' namenode = namenode ']' + CLASS=org.apache.hadoop.hdfs.server.namenode.NameNode + HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender ' + export 'CLASSPATH=/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/:/usr/local/hadoop/share/hadoop/mapreduce/lib/:/usr/local/hadoop/share/hadoop/mapreduce/:/contrib/capacity-scheduler/.jar' + 
CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/:/usr/local/hadoop/share/hadoop/mapreduce/lib/:/usr/local/hadoop/share/hadoop/mapreduce/:/contrib/capacity-scheduler/.jar' + HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender -Dhadoop.security.logger=INFO,NullAppender' + '[' '' = true ']' + exec /usr/local/jdk1.6.0_45/bin/java -Dproc_namenode -Xmx1000m -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.hdfs.server.namenode.NameNode -format java.lang.NoClassDefFoundError: org/apache/hadoop/hdfs/server/namenode/NameNode Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.hdfs.server.namenode.NameNode at java.net.URLClassLoader$1.run(URLClassLoader.java:202) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:190) at java.lang.ClassLoader.loadClass(ClassLoader.java:306) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) at java.lang.ClassLoader.loadClass(ClassLoader.java:247) Could not 
find the main class: org.apache.hadoop.hdfs.server.namenode.NameNode. Program will exit. Exception in thread "main" 24king@24king-PC /usr/local/hadoop $ vim etc/hadoop/hadoop-env.sh
So, I modified the `hdfs` shell script to pass the classpath explicitly via `-cp`:
It still doesn't work — the same error occurs:
- HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender '
- export 'CLASSPATH=/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/:/usr/local/hadoop/share/hadoop/mapreduce/lib/:/usr/local/hadoop/share/hadoop/mapreduce/:/contrib/capacity-scheduler/*.jar'
- CLASSPATH='/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/:/usr/local/hadoop/share/hadoop/mapreduce/lib/:/usr/local/hadoop/share/hadoop/mapreduce/:/contrib/capacity-scheduler/*.jar'
- HADOOP_OPTS=' -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender -Dhadoop.security.logger=INFO,NullAppender'
- '[' '' = true ']'
- exec /usr/local/jdk1.6.0_45/bin/java -Dproc_namenode -Xmx1000m -Djava.net.preferIPv4Stack=true -Dhadoop.log.dir=/usr/local/hadoop/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/usr/local/hadoop -Dhadoop.id.str=24king -Dhadoop.root.logger=INFO,console -Djava.library.path=/usr/local/hadoop/lib/native -Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender -Dhadoop.security.logger=INFO,NullAppender -cp '/usr/local/hadoop/etc/hadoop:/usr/local/hadoop/share/hadoop/common/lib/:/usr/local/hadoop/share/hadoop/common/:/usr/local/hadoop/share/hadoop/hdfs:/usr/local/hadoop/share/hadoop/hdfs/lib/:/usr/local/hadoop/share/hadoop/hdfs/:/usr/local/hadoop/share/hadoop/yarn/lib/:/usr/local/hadoop/share/hadoop/yarn/:/usr/local/hadoop/share/hadoop/mapreduce/lib/:/usr/local/hadoop/share/hadoop/mapreduce/:/contrib/capacity-scheduler/*.jar' org.apache.hadoop.hdfs.server.namenode.NameNode -format java.lang.NoClassDefFoundError: org/apache/hadoop/hdfs/server/namenode/NameNode Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.hdfs.server.namenode.NameNode at java.net.URLClassLoader$1.run(URLClassLoader.java:202) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:190) at java.lang.ClassLoader.loadClass(ClassLoader.java:306) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) at java.lang.ClassLoader.loadClass(ClassLoader.java:247) Could not find the main class: org.apache.hadoop.hdfs.server.namenode.NameNode. Program will exit. Exception in thread "main"