HIVE 安装系列(1)安装部署 遇到虚拟机磁盘空间满 清理空间 折腾了半天
2016-03-01 22:12
971 查看
1、上传apache-hive-1.2.1-bin.tar.gz
2、安装结果
![](http://img.blog.csdn.net/20160301223757283?watermark/2/text/aHR0cDovL2Jsb2cuY3Nkbi5uZXQv/font/5a6L5L2T/fontsize/400/fill/I0JBQkFCMA==/dissolve/70/gravity/Center)
[root@localhost setup_tools]#mv apache-hive-1.2.1-bin /usr/local
[root@localhost setup_tools]#cd ..
[root@localhost local]#ls
3、[root@localhost local]#vi /etc/profile
export HIVE_HOME=/usr/local/apache-hive-1.2.1
export PATH=.:$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$SCALA_HOME/bin:$SPARK_HOME/bin:$HIVE_HOME/bin
4、[root@localhost local]#source /etc/profile
[root@localhost local]#hive
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/hadoop-2.6.0/share/hadoop/common/lib/slf4j-log4j12-
1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/spark-1.6.0-bin-hadoop2.6/lib/spark-assembly-1.6.0-
hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/hadoop-2.6.0/share/hadoop/common/lib/slf4j-log4j12-
1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/spark-1.6.0-bin-hadoop2.6/lib/spark-assembly-1.6.0-
hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Logging initialized using configuration in jar:file:/usr/local/apache-hive-1.2.1/lib/hive-common-
1.2.1.jar!/hive-log4j.properties
Java HotSpot(TM) Client VM warning: You have loaded library /tmp/libnetty-transport-native-
epoll3860804931061913421.so which might have disabled stack guard. The VM will try to fix the stack guard now.
It's highly recommended that you fix the library with 'execstack -c <libfile>', or link it with '-z
noexecstack'.
Exception in thread "main" java.lang.RuntimeException: java.net.ConnectException: Call From localhost/127.0.0.1
to Master:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details
see: http://wiki.apache.org/hadoop/ConnectionRefused
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:677)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: java.net.ConnectException: Call From localhost/127.0.0.1 to Master:9000 failed on connection
exception: java.net.ConnectException: Connection refused; For more details see:
http://wiki.apache.org/hadoop/ConnectionRefused
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:791)
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:731)
at org.apache.hadoop.ipc.Client.call(Client.java:1472)
at org.apache.hadoop.ipc.Client.call(Client.java:1399)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at com.sun.proxy.$Proxy13.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo
(ClientNamenodeProtocolTranslatorPB.java:752)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy14.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1988)
at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:1118)
at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:1114)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1114)
at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1400)
at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:596)
at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
... 8 more
Caused by: java.net.ConnectException: Connection refused
at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:607)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:705)
at org.apache.hadoop.ipc.Client$Connection.access$2800(Client.java:368)
at org.apache.hadoop.ipc.Client.getConnection(Client.java:1521)
at org.apache.hadoop.ipc.Client.call(Client.java:1438)
... 28 more
[root@localhost local]#ls
5、删除一个jar包（删除前先备份）
/usr/local/hadoop-2.6.0/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar
[root@localhost lib]#cp slf4j-log4j12-1.7.5.jar slf4j-log4j12-1.7.5.jar.dzh_bak
[root@localhost lib]#rm slf4j-log4j12-1.7.5.jar
rm: remove regular file `slf4j-log4j12-1.7.5.jar'? y
[root@localhost lib]#ls
[root@localhost lib]#pwd
/usr/local/hadoop-2.6.0/share/hadoop/common/lib
[root@localhost lib]#
6、再次运行hive，遇到NameNode处于安全模式的异常：
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.SafeModeException):
Cannot create directory /tmp/hive/root/4db0dfe3-8d63-4b4e-9ea7-498f4e0f47d9. Name node is in safe mode.
7、关闭hadoop的安全模式
[root@localhost bin]#hadoop dfsadmin -safemode leave
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
16/03/01 08:49:02 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using
builtin-java classes where applicable
Safe mode is OFF
[root@localhost bin]#
[root@localhost tmp]#mkdir hive
[root@localhost tmp]#chmod 777 hive
[root@localhost tmp]#ls
[root@localhost hive]#pwd
/tmp/hive
[root@localhost hive]#
8、[root@localhost tmp]#df -k
Filesystem 1K-blocks Used Available Use% Mounted on
/dev/sda2 5921544 5706444 0 100% /
tmpfs 515340 12 515328 1% /dev/shm
/dev/sda1 297485 31772 250353 12% /boot
/dev/sr0 544858 544858 0 100% /media/OFFICE12
[root@localhost tmp]#
查找文件大小（参考文章：/article/4663291.html）
[root@master local]#du -h --max-depth=2 | sort -n
[root@master ~]#du -h --max-depth=2 | sort -n
1.1M ./metastore_db/log
3.0M ./metastore_db/seg0
4.0K ./metastore_db/tmp
4.0M ./metastore_db
4.2M .
8.0K ./.oracle_jre_usage
20K ./.ssh
[root@master ~]#du -h --max-depth=1 | sort -n
4.0M ./metastore_db
4.2M .
8.0K ./.oracle_jre_usage
20K ./.ssh
[root@master ~]#cd /usr/local
[root@master local]#du -h --max-depth=2 | sort -n
1.1M ./metastore_db/log
1.3M ./hadoop-1.2.1/c++
1.3M ./hadoop-1.2.1/logs
1.5M ./hadoop-1.2.1/contrib
1.6M ./apache-hive-1.2.1/examples
1.7M ./spark-1.6.0-bin-hadoop2.6/examples
1.8G .
1.8M ./jdk1.7.0_79/man
2.0M ./jdk1.8.0_65/man
2.2M ./apache-hive-1.2.1/scripts
2.3M ./spark-1.6.0-bin-hadoop2.6/python
2.7M ./hadoop-2.6.0/file:
2.9M ./metastore_db/seg0
3.9M ./metastore_db
3.9M ./spark-1.6.0-bin-hadoop2.6/sbin
4.0K ./bin
4.0K ./etc
4.0K ./games
4.0K ./include
4.0K ./lib
4.0K ./libexec
4.0K ./metastore_db/tmp
4.0K ./sbin
4.0K ./setup_tools
4.0K ./share/info
4.0K ./spark-1.0.0-bin-hadoop1/work
4.0K ./src
4.0M ./scala-2.10.4/src
4.1M ./apache-hive-1.2.1/bin
4.1M ./spark-1.6.0-bin-hadoop2.6/bin
4.8M ./spark-1.6.0-bin-hadoop2.6/R
4.9M ./hadoop-2.6.0/lib
5.0M ./apache-hive-1.2.1/hcatalog
5.0M ./jdk1.7.0_79/db
5.7M ./jdk1.8.0_65/db
8.0K ./share/applications
8.0K ./spark-1.6.0-bin-hadoop2.6/IMF2016
8.5M ./hadoop-2.6.0/logs
16K ./IMFdatatest/invertedindex
28M ./IMFdatatest
28M ./IMFdatatest/RhzfApacheUrl
28M ./scala-2.10.4/lib
32K ./IMFlinuxshell
32K ./spark-1.0.0-bin-hadoop1/conf
33M ./scala-2.10.4
36K ./IMFdatatest/
40K ./hadoop-1.2.1/.eclipse.templates
52K ./hadoop-2.6.0/testdata
52K ./spark-1.6.0-bin-hadoop2.6/conf
56K ./scala-2.10.4/man
60K ./hadoop-1.2.1/libexec
60K ./hadoop-2.6.0/include
60K ./spark-1.0.0-bin-hadoop1/logs
64K ./hadoop-2.6.0/libexec
64K ./spark-1.0.0-bin-hadoop1/sbin
65M ./hadoop-2.6.0/tmp
72K ./hadoop-1.2.1/sbin
76M ./hadoop-1.2.1/lib
80K ./hadoop-1.2.1/src
80K ./share/man
84K ./hadoop-1.2.1/conf
84K ./hadoop-1.2.1/share
84K ./scala-2.10.4/bin
84K ./spark-1.0.0-bin-hadoop1/bin
88K ./spark-1.6.0-bin-hadoop2.6/ec2
90M ./hadoop-1.2.1
92K ./spark-1.6.0-bin-hadoop2.6/work
96K ./share
97M ./apache-hive-1.2.1/lib
111M ./apache-hive-1.2.1
124K ./hadoop-2.6.0/sbin
129M ./jdk1.7.0_79/lib
133M ./jdk1.8.0_65/lib
136K ./spark-1.6.0-bin-hadoop2.6/logs
144K ./scala-2.10.4/doc
147M ./jdk1.7.0_79/jre
148K ./hadoop-1.2.1/bin
156K ./spark-1.6.0-bin-hadoop2.6/licenses
180K ./scala-2.10.4/misc
183M ./spark-1.0.0-bin-hadoop1/lib
186M ./spark-1.0.0-bin-hadoop1
187M ./jdk1.8.0_65/jre
192K ./apache-hive-1.2.1/conf
208K ./jdk1.7.0_79/include
208K ./jdk1.8.0_65/include
260K ./scala-2.10.4/examples
303M ./jdk1.7.0_79
303M ./spark-1.6.0-bin-hadoop2.6/lib
317M ./hadoop-2.6.0/share
322M ./spark-1.6.0-bin-hadoop2.6
324K ./hadoop-2.6.0/etc
353M ./jdk1.8.0_65
396K ./hadoop-1.2.1/webapps
399M ./hadoop-2.6.0
444K ./hadoop-2.6.0/bin
520K ./spark-1.0.0-bin-hadoop1/examples
604K ./spark-1.0.0-bin-hadoop1/python
728K ./jdk1.7.0_79/bin
756K ./jdk1.8.0_65/bin
908K ./spark-1.0.0-bin-hadoop1/ec2
912K ./spark-1.6.0-bin-hadoop2.6/data
944K ./hadoop-1.2.1/ivy
[root@master sbin]#hive
Logging initialized using configuration in jar:file:/usr/local/apache-hive-1.2.1/lib/hive-common-
1.2.1.jar!/hive-log4j.properties
Java HotSpot(TM) Client VM warning: You have loaded library /tmp/libnetty-transport-native-
epoll565453623468795860.so which might have disabled stack guard. The VM will try to fix the stack guard now.
It's highly recommended that you fix the library with 'execstack -c <libfile>', or link it with '-z
noexecstack'.
[ERROR] Terminal initialization failed; falling back to unsupported
java.lang.IncompatibleClassChangeError: Found class jline.Terminal, but interface was expected
at jline.TerminalFactory.create(TerminalFactory.java:101)
at jline.TerminalFactory.get(TerminalFactory.java:158)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:229)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:221)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:209)
at org.apache.hadoop.hive.cli.CliDriver.setupConsoleReader(CliDriver.java:787)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:721)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Exception in thread "main" java.lang.IncompatibleClassChangeError: Found class jline.Terminal, but interface
was expected
at jline.console.ConsoleReader.<init>(ConsoleReader.java:230)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:221)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:209)
at org.apache.hadoop.hive.cli.CliDriver.setupConsoleReader(CliDriver.java:787)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:721)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
[root@master sbin]#hadoop dfs -ls /
jline冲突的解决方法参考文章：/article/8726727.html
1.Delete jline from the Hadoop lib directory (it's only pulled in transitively from ZooKeeper).
2.export HADOOP_USER_CLASSPATH_FIRST=true
[root@master sbin]#export HADOOP_USER_CLASSPATH_FIRST=true
[root@master sbin]#hive
Logging initialized using configuration in jar:file:/usr/local/apache-hive-1.2.1/lib/hive-common-1.2.1.jar!/hive-log4j.properties
Java HotSpot(TM) Client VM warning: You have loaded library /tmp/libnetty-transport-native-epoll8495691433446510379.so which might have disabled stack guard. The VM will try to fix the stack guard now.
It's highly recommended that you fix the library with 'execstack -c <libfile>', or link it with '-z noexecstack'.
hive>
2、安装结果
[root@localhost setup_tools]#mv apache-hive-1.2.1-bin /usr/local
[root@localhost setup_tools]#cd ..
[root@localhost local]#ls
3、[root@localhost local]#vi /etc/profile
export HIVE_HOME=/usr/local/apache-hive-1.2.1
export PATH=.:$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$SCALA_HOME/bin:$SPARK_HOME/bin:$HIVE_HOME/bin
4、[root@localhost local]#source /etc/profile
[root@localhost local]#hive
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/hadoop-2.6.0/share/hadoop/common/lib/slf4j-log4j12-
1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/spark-1.6.0-bin-hadoop2.6/lib/spark-assembly-1.6.0-
hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/hadoop-2.6.0/share/hadoop/common/lib/slf4j-log4j12-
1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/spark-1.6.0-bin-hadoop2.6/lib/spark-assembly-1.6.0-
hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Logging initialized using configuration in jar:file:/usr/local/apache-hive-1.2.1/lib/hive-common-
1.2.1.jar!/hive-log4j.properties
Java HotSpot(TM) Client VM warning: You have loaded library /tmp/libnetty-transport-native-
epoll3860804931061913421.so which might have disabled stack guard. The VM will try to fix the stack guard now.
It's highly recommended that you fix the library with 'execstack -c <libfile>', or link it with '-z
noexecstack'.
Exception in thread "main" java.lang.RuntimeException: java.net.ConnectException: Call From localhost/127.0.0.1
to Master:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details
see: http://wiki.apache.org/hadoop/ConnectionRefused
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:677)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: java.net.ConnectException: Call From localhost/127.0.0.1 to Master:9000 failed on connection
exception: java.net.ConnectException: Connection refused; For more details see:
http://wiki.apache.org/hadoop/ConnectionRefused
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:791)
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:731)
at org.apache.hadoop.ipc.Client.call(Client.java:1472)
at org.apache.hadoop.ipc.Client.call(Client.java:1399)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at com.sun.proxy.$Proxy13.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo
(ClientNamenodeProtocolTranslatorPB.java:752)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy14.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1988)
at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:1118)
at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:1114)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1114)
at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1400)
at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:596)
at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
... 8 more
Caused by: java.net.ConnectException: Connection refused
at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:607)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:705)
at org.apache.hadoop.ipc.Client$Connection.access$2800(Client.java:368)
at org.apache.hadoop.ipc.Client.getConnection(Client.java:1521)
at org.apache.hadoop.ipc.Client.call(Client.java:1438)
... 28 more
[root@localhost local]#ls
5、删除一个jar包（删除前先备份）
/usr/local/hadoop-2.6.0/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar
[root@localhost lib]#cp slf4j-log4j12-1.7.5.jar slf4j-log4j12-1.7.5.jar.dzh_bak
[root@localhost lib]#rm slf4j-log4j12-1.7.5.jar
rm: remove regular file `slf4j-log4j12-1.7.5.jar'? y
[root@localhost lib]#ls
[root@localhost lib]#pwd
/usr/local/hadoop-2.6.0/share/hadoop/common/lib
[root@localhost lib]#
6、再次运行hive，遇到NameNode处于安全模式的异常：
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.SafeModeException):
Cannot create directory /tmp/hive/root/4db0dfe3-8d63-4b4e-9ea7-498f4e0f47d9. Name node is in safe mode.
7、关闭hadoop的安全模式
[root@localhost bin]#hadoop dfsadmin -safemode leave
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
16/03/01 08:49:02 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using
builtin-java classes where applicable
Safe mode is OFF
[root@localhost bin]#
[root@localhost tmp]#mkdir hive
[root@localhost tmp]#chmod 777 hive
[root@localhost tmp]#ls
[root@localhost hive]#pwd
/tmp/hive
[root@localhost hive]#
8、[root@localhost tmp]#df -k
Filesystem 1K-blocks Used Available Use% Mounted on
/dev/sda2 5921544 5706444 0 100% /
tmpfs 515340 12 515328 1% /dev/shm
/dev/sda1 297485 31772 250353 12% /boot
/dev/sr0 544858 544858 0 100% /media/OFFICE12
[root@localhost tmp]#
查找文件大小（参考文章：/article/4663291.html）
[root@master local]#du -h --max-depth=2 | sort -n
[root@master ~]#du -h --max-depth=2 | sort -n
1.1M ./metastore_db/log
3.0M ./metastore_db/seg0
4.0K ./metastore_db/tmp
4.0M ./metastore_db
4.2M .
8.0K ./.oracle_jre_usage
20K ./.ssh
[root@master ~]#du -h --max-depth=1 | sort -n
4.0M ./metastore_db
4.2M .
8.0K ./.oracle_jre_usage
20K ./.ssh
[root@master ~]#cd /usr/local
[root@master local]#du -h --max-depth=2 | sort -n
1.1M ./metastore_db/log
1.3M ./hadoop-1.2.1/c++
1.3M ./hadoop-1.2.1/logs
1.5M ./hadoop-1.2.1/contrib
1.6M ./apache-hive-1.2.1/examples
1.7M ./spark-1.6.0-bin-hadoop2.6/examples
1.8G .
1.8M ./jdk1.7.0_79/man
2.0M ./jdk1.8.0_65/man
2.2M ./apache-hive-1.2.1/scripts
2.3M ./spark-1.6.0-bin-hadoop2.6/python
2.7M ./hadoop-2.6.0/file:
2.9M ./metastore_db/seg0
3.9M ./metastore_db
3.9M ./spark-1.6.0-bin-hadoop2.6/sbin
4.0K ./bin
4.0K ./etc
4.0K ./games
4.0K ./include
4.0K ./lib
4.0K ./libexec
4.0K ./metastore_db/tmp
4.0K ./sbin
4.0K ./setup_tools
4.0K ./share/info
4.0K ./spark-1.0.0-bin-hadoop1/work
4.0K ./src
4.0M ./scala-2.10.4/src
4.1M ./apache-hive-1.2.1/bin
4.1M ./spark-1.6.0-bin-hadoop2.6/bin
4.8M ./spark-1.6.0-bin-hadoop2.6/R
4.9M ./hadoop-2.6.0/lib
5.0M ./apache-hive-1.2.1/hcatalog
5.0M ./jdk1.7.0_79/db
5.7M ./jdk1.8.0_65/db
8.0K ./share/applications
8.0K ./spark-1.6.0-bin-hadoop2.6/IMF2016
8.5M ./hadoop-2.6.0/logs
16K ./IMFdatatest/invertedindex
28M ./IMFdatatest
28M ./IMFdatatest/RhzfApacheUrl
28M ./scala-2.10.4/lib
32K ./IMFlinuxshell
32K ./spark-1.0.0-bin-hadoop1/conf
33M ./scala-2.10.4
36K ./IMFdatatest/
40K ./hadoop-1.2.1/.eclipse.templates
52K ./hadoop-2.6.0/testdata
52K ./spark-1.6.0-bin-hadoop2.6/conf
56K ./scala-2.10.4/man
60K ./hadoop-1.2.1/libexec
60K ./hadoop-2.6.0/include
60K ./spark-1.0.0-bin-hadoop1/logs
64K ./hadoop-2.6.0/libexec
64K ./spark-1.0.0-bin-hadoop1/sbin
65M ./hadoop-2.6.0/tmp
72K ./hadoop-1.2.1/sbin
76M ./hadoop-1.2.1/lib
80K ./hadoop-1.2.1/src
80K ./share/man
84K ./hadoop-1.2.1/conf
84K ./hadoop-1.2.1/share
84K ./scala-2.10.4/bin
84K ./spark-1.0.0-bin-hadoop1/bin
88K ./spark-1.6.0-bin-hadoop2.6/ec2
90M ./hadoop-1.2.1
92K ./spark-1.6.0-bin-hadoop2.6/work
96K ./share
97M ./apache-hive-1.2.1/lib
111M ./apache-hive-1.2.1
124K ./hadoop-2.6.0/sbin
129M ./jdk1.7.0_79/lib
133M ./jdk1.8.0_65/lib
136K ./spark-1.6.0-bin-hadoop2.6/logs
144K ./scala-2.10.4/doc
147M ./jdk1.7.0_79/jre
148K ./hadoop-1.2.1/bin
156K ./spark-1.6.0-bin-hadoop2.6/licenses
180K ./scala-2.10.4/misc
183M ./spark-1.0.0-bin-hadoop1/lib
186M ./spark-1.0.0-bin-hadoop1
187M ./jdk1.8.0_65/jre
192K ./apache-hive-1.2.1/conf
208K ./jdk1.7.0_79/include
208K ./jdk1.8.0_65/include
260K ./scala-2.10.4/examples
303M ./jdk1.7.0_79
303M ./spark-1.6.0-bin-hadoop2.6/lib
317M ./hadoop-2.6.0/share
322M ./spark-1.6.0-bin-hadoop2.6
324K ./hadoop-2.6.0/etc
353M ./jdk1.8.0_65
396K ./hadoop-1.2.1/webapps
399M ./hadoop-2.6.0
444K ./hadoop-2.6.0/bin
520K ./spark-1.0.0-bin-hadoop1/examples
604K ./spark-1.0.0-bin-hadoop1/python
728K ./jdk1.7.0_79/bin
756K ./jdk1.8.0_65/bin
908K ./spark-1.0.0-bin-hadoop1/ec2
912K ./spark-1.6.0-bin-hadoop2.6/data
944K ./hadoop-1.2.1/ivy
[root@master sbin]#hive
Logging initialized using configuration in jar:file:/usr/local/apache-hive-1.2.1/lib/hive-common-
1.2.1.jar!/hive-log4j.properties
Java HotSpot(TM) Client VM warning: You have loaded library /tmp/libnetty-transport-native-
epoll565453623468795860.so which might have disabled stack guard. The VM will try to fix the stack guard now.
It's highly recommended that you fix the library with 'execstack -c <libfile>', or link it with '-z
noexecstack'.
[ERROR] Terminal initialization failed; falling back to unsupported
java.lang.IncompatibleClassChangeError: Found class jline.Terminal, but interface was expected
at jline.TerminalFactory.create(TerminalFactory.java:101)
at jline.TerminalFactory.get(TerminalFactory.java:158)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:229)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:221)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:209)
at org.apache.hadoop.hive.cli.CliDriver.setupConsoleReader(CliDriver.java:787)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:721)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Exception in thread "main" java.lang.IncompatibleClassChangeError: Found class jline.Terminal, but interface
was expected
at jline.console.ConsoleReader.<init>(ConsoleReader.java:230)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:221)
at jline.console.ConsoleReader.<init>(ConsoleReader.java:209)
at org.apache.hadoop.hive.cli.CliDriver.setupConsoleReader(CliDriver.java:787)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:721)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
[root@master sbin]#hadoop dfs -ls /
jline冲突的解决方法参考文章：/article/8726727.html
1.Delete jline from the Hadoop lib directory (it's only pulled in transitively from ZooKeeper).
2.export HADOOP_USER_CLASSPATH_FIRST=true
[root@master sbin]#export HADOOP_USER_CLASSPATH_FIRST=true
[root@master sbin]#hive
Logging initialized using configuration in jar:file:/usr/local/apache-hive-1.2.1/lib/hive-common-1.2.1.jar!/hive-log4j.properties
Java HotSpot(TM) Client VM warning: You have loaded library /tmp/libnetty-transport-native-epoll8495691433446510379.so which might have disabled stack guard. The VM will try to fix the stack guard now.
It's highly recommended that you fix the library with 'execstack -c <libfile>', or link it with '-z noexecstack'.
hive>
相关文章推荐
- iOS自己封装控件
- 【小笔记】自定义返回按钮
- RabbitMQ Broker管理
- PAT-A1019 General Palindromic Number(20)(模拟)
- java异常面试题及编程题
- C语言中的多文件执行
- 在这片神奇的土地上
- Same Tree
- D5
- 创建一个学生类,按照要求生成各种方法,然后使用数学方法计算某一个元素的规定结果
- 如何实现动态规划?——TWO
- CSS---!important
- MyEclipse使用总结——MyEclipse去除网上复制下来的来代码带有的行号
- 使用Core Animation对象来实现动画
- 使用Core Animation对象来实现动画
- ♥POJ 3278-Catch That Cow【搜索】
- 项链
- cocos2d-x之文件读写
- Codeforces Round #259 (Div. 2) B. Little Pony and Sort by Shift
- C++利用函数调用实现数组数据的输入输出