Caused by: java.net.ConnectException: Connection refused (Connection refused)
at java.net.PlainSocketImpl.socketConnect(Native Method)
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
at java.net.Socket.connect(Socket.java:589)
at java.net.Socket.connect(Socket.java:538)
at java.net.Socket.<init>(Socket.java:434)
at java.net.Socket.<init>(Socket.java:244)
at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:253)
at com.mysql.jdbc.MysqlIO.<init>(MysqlIO.java:280)
at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2026)
... 61 more
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at com.jolbox.bonecp.PoolUtil.generateSQLException(PoolUtil.java:192)
at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:422)
at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:501)
at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:298)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
at org.datanucleus.NucleusContext.createStoreManagerForProperties(NucleusContext.java:1187)
at org.datanucleus.NucleusContext.initialise(NucleusContext.java:356)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:775)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:333)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
at java.security.AccessController.doPrivileged(Native Method)
at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365)
at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394)
at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291)
at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5757)
at org.apache.hadoop.hive.metastore.HiveMetaStore.startMetaStore(HiveMetaStore.java:5990)
at org.apache.hadoop.hive.metastore.HiveMetaStore.main(HiveMetaStore.java:5915)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
Last packet sent to the server was 0 ms ago.
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at com.mysql.jdbc.Util.handleNewInstance(Util.java:406)
at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:1074)
at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2103)
at com.mysql.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:718)
at com.mysql.jdbc.JDBC4Connection.<init>(JDBC4Connection.java:46)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at com.mysql.jdbc.Util.handleNewInstance(Util.java:406)
at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:302)
at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:282)
at java.sql.DriverManager.getConnection(DriverManager.java:664)
at java.sql.DriverManager.getConnection(DriverManager.java:208)
at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:416)
... 48 more

  Starting Hive fails with the error above, indicating that it cannot connect to MySQL. Checking the corresponding process shows that mysqld is not running; starting MySQL fixes the problem.
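
  Before restarting anything, a quick check can confirm that nothing is listening on the MySQL port. This is a minimal sketch assuming a SysV-style service named mysqld and the default port 3306, neither of which this article spells out:

# Check whether the MySQL daemon is running
service mysqld status
# Check for a listener on the default MySQL port
netstat -lntp | grep 3306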

# Start MySQL
service mysqld start
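
  Once mysqld is up, the metastore connection can be re-tested from the same host with the MySQL client, using the host, port, user, and database configured in javax.jdo.option.ConnectionURL in hive-site.xml. The values below are illustrative placeholders, not taken from this article:

# Connect with the same endpoint the metastore uses (example values)
mysql -h 127.0.0.1 -P 3306 -u hive -p

  If this succeeds but the Hive metastore still reports "Connection refused", the JDBC URL in hive-site.xml most likely points at a different host or port than the running MySQL instance.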

  
