
[Hadoop Hive] Hive CREATE TABLE error: Specified key was too long; max key length is 767 bytes

2015-03-11 14:44
Hive throws an error when creating a table.

The CREATE TABLE statement: create table table_test(id string,name string);

The error message is as follows:

FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:javax.jdo.JDODataStoreException: An exception was thrown while adding/validating class(es) : Specified key was too long; max key length is 767 bytes

com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Specified key was too long; max key length is 767 bytes

at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)

at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)

at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)

at java.lang.reflect.Constructor.newInstance(Constructor.java:526)

at com.mysql.jdbc.Util.handleNewInstance(Util.java:406)

at com.mysql.jdbc.Util.getInstance(Util.java:381)

at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1030)

at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:956)

at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3558)

at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3490)

at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:1959)

at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2109)

at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2642)

at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2571)

at com.mysql.jdbc.StatementImpl.execute(StatementImpl.java:782)

at com.mysql.jdbc.StatementImpl.execute(StatementImpl.java:625)

at com.jolbox.bonecp.StatementHandle.execute(StatementHandle.java:254)

at org.datanucleus.store.rdbms.table.AbstractTable.executeDdlStatement(AbstractTable.java:760)

at org.datanucleus.store.rdbms.table.AbstractTable.executeDdlStatementList(AbstractTable.java:711)

at org.datanucleus.store.rdbms.table.AbstractTable.create(AbstractTable.java:425)

at org.datanucleus.store.rdbms.table.AbstractTable.exists(AbstractTable.java:488)

at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:3380)

at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTablesAndValidate(RDBMSStoreManager.java:3190)

at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2841)

at org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:122)

at org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:1605)

at org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:954)

at org.datanucleus.store.rdbms.RDBMSStoreManager.getDatastoreClass(RDBMSStoreManager.java:679)

at org.datanucleus.store.rdbms.RDBMSStoreManager.getPropertiesForGenerator(RDBMSStoreManager.java:2045)

at org.datanucleus.store.AbstractStoreManager.getStrategyValue(AbstractStoreManager.java:1365)

at org.datanucleus.ExecutionContextImpl.newObjectId(ExecutionContextImpl.java:3827)

at org.datanucleus.state.JDOStateManager.setIdentity(JDOStateManager.java:2571)

at org.datanucleus.state.JDOStateManager.initialiseForPersistentNew(JDOStateManager.java:513)

at org.datanucleus.state.ObjectProviderFactoryImpl.newForPersistentNew(ObjectProviderFactoryImpl.java:232)

at org.datanucleus.ExecutionContextImpl.newObjectProviderForPersistentNew(ExecutionContextImpl.java:1414)

at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2218)

at org.datanucleus.ExecutionContextImpl.persistObjectWork(ExecutionContextImpl.java:2065)

at org.datanucleus.ExecutionContextImpl.persistObject(ExecutionContextImpl.java:1913)

at org.datanucleus.ExecutionContextThreadedImpl.persistObject(ExecutionContextThreadedImpl.java:217)

at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:727)

at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:752)

at org.apache.hadoop.hive.metastore.ObjectStore.createTable(ObjectStore.java:784)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:98)

at com.sun.proxy.$Proxy4.createTable(Unknown Source)

at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_core(HiveMetaStore.java:1374)

at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_with_environment_context(HiveMetaStore.java:1407)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:102)

at com.sun.proxy.$Proxy5.create_table_with_environment_context(Unknown Source)

at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.create_table_with_environment_context(HiveMetaStoreClient.java:1884)

at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.create_table_with_environment_context(SessionHiveMetaStoreClient.java:96)

at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:607)

at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:595)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:90)

at com.sun.proxy.$Proxy6.createTable(Unknown Source)

at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:670)

at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:3959)

at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:295)

at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)

at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85)

at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1604)

at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1364)

at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1177)

at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1004)

at org.apache.hadoop.hive.ql.Driver.run(Driver.java:994)

at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:247)

at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:199)

at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:410)

at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:783)

at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:677)

at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:616)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.util.RunJar.main(RunJar.java:156)

at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:451)

at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:732)

at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:752)

at org.apache.hadoop.hive.metastore.ObjectStore.createTable(ObjectStore.java:784)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:98)

at com.sun.proxy.$Proxy4.createTable(Unknown Source)

at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_core(HiveMetaStore.java:1374)

at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_with_environment_context(HiveMetaStore.java:1407)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:102)

at com.sun.proxy.$Proxy5.create_table_with_environment_context(Unknown Source)

at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.create_table_with_environment_context(HiveMetaStoreClient.java:1884)

at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.create_table_with_environment_context(SessionHiveMetaStoreClient.java:96)

at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:607)

at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:595)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:90)

at com.sun.proxy.$Proxy6.createTable(Unknown Source)

at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:670)

at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:3959)

at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:295)

at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)

at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85)

at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1604)

at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1364)

at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1177)

at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1004)

at org.apache.hadoop.hive.ql.Driver.run(Driver.java:994)

at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:247)

at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:199)

at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:410)

at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:783)

at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:677)

at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:616)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.util.RunJar.main(RunJar.java:156)

NestedThrowablesStackTrace:

com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Specified key was too long; max key length is 767 bytes

at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)

at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)

... (remainder of the stack trace omitted)

Solution:

The Hive metastore database in MySQL was created with a multi-byte character set (such as utf8), so the index keys the metastore builds on its varchar columns exceed InnoDB's 767-byte limit. Switching the metastore database to latin1 (one byte per character) keeps every index key within that limit.

Log in to the MySQL client on the machine hosting the metastore and run:

alter database hive character set latin1;

Problem solved.
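A minimal sketch of the check-and-fix sequence, assuming the metastore database is named hive (the actual name depends on javax.jdo.option.ConnectionURL in hive-site.xml) and that the MySQL account has ALTER privileges:

mysql -u root -p
SHOW CREATE DATABASE hive;                  -- confirm the current character set (typically utf8 when this error occurs)
ALTER DATABASE hive CHARACTER SET latin1;   -- one byte per character keeps index keys under 767 bytes
SHOW CREATE DATABASE hive;                  -- verify the change took effect

Afterwards, re-running the original statement in the Hive CLI (create table table_test(id string,name string);) should succeed. Note that changing the database character set only affects tables created afterwards; since the failing metastore tables were never created, the next attempt will create them with latin1, but if some metastore tables already exist with utf8 they would need to be converted or recreated as well.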