执行 `start job -jid 1` 命令时报错：
Exception has occurred during processing command
Exception: org.apache.sqoop.common.SqoopException Message: CLIENT_0001:Server has returned exception
Stack trace:
at org.apache.sqoop.client.request.ResourceRequest (ResourceRequest.java:129)
at org.apache.sqoop.client.request.ResourceRequest (ResourceRequest.java:179)
at org.apache.sqoop.client.request.JobResourceRequest (JobResourceRequest.java:112)
at org.apache.sqoop.client.request.SqoopResourceRequests (SqoopResourceRequests.java:157)
at org.apache.sqoop.client.SqoopClient (SqoopClient.java:452)
at org.apache.sqoop.shell.StartJobFunction (StartJobFunction.java:80)
at org.apache.sqoop.shell.SqoopFunction (SqoopFunction.java:51)
at org.apache.sqoop.shell.SqoopCommand (SqoopCommand.java:135)
at org.apache.sqoop.shell.SqoopCommand (SqoopCommand.java:111)
at org.codehaus.groovy.tools.shell.Command$execute (null:-1)
at org.codehaus.groovy.runtime.callsite.CallSiteArray (CallSiteArray.java:42)
at org.codehaus.groovy.tools.shell.Command$execute (null:-1)
at org.codehaus.groovy.tools.shell.Shell (Shell.groovy:101)
at org.codehaus.groovy.tools.shell.Groovysh (Groovysh.groovy:-1)
at sun.reflect.GeneratedMethodAccessor23 (null:-1)
at sun.reflect.DelegatingMethodAccessorImpl (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method (Method.java:498)
at org.codehaus.groovy.reflection.CachedMethod (CachedMethod.java:90)
at groovy.lang.MetaMethod (MetaMethod.java:233)
at groovy.lang.MetaClassImpl (MetaClassImpl.java:1054)
at org.codehaus.groovy.runtime.ScriptBytecodeAdapter (ScriptBytecodeAdapter.java:128)
at org.codehaus.groovy.tools.shell.Groovysh (Groovysh.groovy:173)
at sun.reflect.GeneratedMethodAccessor22 (null:-1)
at sun.reflect.DelegatingMethodAccessorImpl (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method (Method.java:498)
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite$PogoCachedMethodSiteNoUnwrapNoCoerce (PogoMetaMethodSite.java:267)
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite (PogoMetaMethodSite.java:52)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite (AbstractCallSite.java:141)
at org.codehaus.groovy.tools.shell.Groovysh (Groovysh.groovy:121)
at org.codehaus.groovy.tools.shell.Shell (Shell.groovy:114)
at org.codehaus.groovy.tools.shell.Shell$leftShift$0 (null:-1)
at org.codehaus.groovy.tools.shell.ShellRunner (ShellRunner.groovy:88)
at org.codehaus.groovy.tools.shell.InteractiveShellRunner (InteractiveShellRunner.groovy:-1)
at sun.reflect.GeneratedMethodAccessor20 (null:-1)
at sun.reflect.DelegatingMethodAccessorImpl (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method (Method.java:498)
at org.codehaus.groovy.reflection.CachedMethod (CachedMethod.java:90)
at groovy.lang.MetaMethod (MetaMethod.java:233)
at groovy.lang.MetaClassImpl (MetaClassImpl.java:1054)
at org.codehaus.groovy.runtime.ScriptBytecodeAdapter (ScriptBytecodeAdapter.java:128)
at org.codehaus.groovy.runtime.ScriptBytecodeAdapter (ScriptBytecodeAdapter.java:148)
at org.codehaus.groovy.tools.shell.InteractiveShellRunner (InteractiveShellRunner.groovy:100)
at sun.reflect.GeneratedMethodAccessor19 (null:-1)
at sun.reflect.DelegatingMethodAccessorImpl (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method (Method.java:498)
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite$PogoCachedMethodSiteNoUnwrapNoCoerce (PogoMetaMethodSite.java:267)
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite (PogoMetaMethodSite.java:52)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite (AbstractCallSite.java:137)
at org.codehaus.groovy.tools.shell.ShellRunner (ShellRunner.groovy:57)
at org.codehaus.groovy.tools.shell.InteractiveShellRunner (InteractiveShellRunner.groovy:-1)
at sun.reflect.NativeMethodAccessorImpl (NativeMethodAccessorImpl.java:-2)
at sun.reflect.NativeMethodAccessorImpl (NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method (Method.java:498)
at org.codehaus.groovy.reflection.CachedMethod (CachedMethod.java:90)
at groovy.lang.MetaMethod (MetaMethod.java:233)
at groovy.lang.MetaClassImpl (MetaClassImpl.java:1054)
at org.codehaus.groovy.runtime.ScriptBytecodeAdapter (ScriptBytecodeAdapter.java:128)
at org.codehaus.groovy.runtime.ScriptBytecodeAdapter (ScriptBytecodeAdapter.java:148)
at org.codehaus.groovy.tools.shell.InteractiveShellRunner (InteractiveShellRunner.groovy:66)
at java_lang_Runnable$run (null:-1)
at org.codehaus.groovy.runtime.callsite.CallSiteArray (CallSiteArray.java:42)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite (AbstractCallSite.java:108)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite (AbstractCallSite.java:112)
at org.codehaus.groovy.tools.shell.Groovysh (Groovysh.groovy:463)
at org.codehaus.groovy.tools.shell.Groovysh (Groovysh.groovy:402)
at org.apache.sqoop.shell.SqoopShell (SqoopShell.java:130)
Caused by: Exception: org.apache.sqoop.common.SqoopException Message: GENERIC_HDFS_CONNECTOR_0007:Invalid output directory - Unexpected exception
Stack trace:
at org.apache.sqoop.connector.hdfs.HdfsToInitializer (HdfsToInitializer.java:71)
at org.apache.sqoop.connector.hdfs.HdfsToInitializer (HdfsToInitializer.java:35)
at org.apache.sqoop.driver.JobManager (JobManager.java:449)
at org.apache.sqoop.driver.JobManager (JobManager.java:373)
at org.apache.sqoop.driver.JobManager (JobManager.java:276)
at org.apache.sqoop.handler.JobRequestHandler (JobRequestHandler.java:380)
at org.apache.sqoop.handler.JobRequestHandler (JobRequestHandler.java:116)
at org.apache.sqoop.server.v1.JobServlet (JobServlet.java:96)
at org.apache.sqoop.server.SqoopProtocolServlet (SqoopProtocolServlet.java:79)
at javax.servlet.http.HttpServlet (HttpServlet.java:646)
at javax.servlet.http.HttpServlet (HttpServlet.java:723)
at org.apache.catalina.core.ApplicationFilterChain (ApplicationFilterChain.java:290)
at org.apache.catalina.core.ApplicationFilterChain (ApplicationFilterChain.java:206)
at org.apache.hadoop.security.authentication.server.AuthenticationFilter (AuthenticationFilter.java:644)
at org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter (DelegationTokenAuthenticationFilter.java:304)
at org.apache.hadoop.security.authentication.server.AuthenticationFilter (AuthenticationFilter.java:592)
at org.apache.catalina.core.ApplicationFilterChain (ApplicationFilterChain.java:235)
at org.apache.catalina.core.ApplicationFilterChain (ApplicationFilterChain.java:206)
at org.apache.catalina.core.StandardWrapperValve (StandardWrapperValve.java:233)
at org.apache.catalina.core.StandardContextValve (StandardContextValve.java:191)
at org.apache.catalina.core.StandardHostValve (StandardHostValve.java:127)
at org.apache.catalina.valves.ErrorReportValve (ErrorReportValve.java:103)
at org.apache.catalina.core.StandardEngineValve (StandardEngineValve.java:109)
at org.apache.catalina.connector.CoyoteAdapter (CoyoteAdapter.java:293)
at org.apache.coyote.http11.Http11Processor (Http11Processor.java:861)
at org.apache.coyote.http11.Http11Protocol$Http11ConnectionHandler (Http11Protocol.java:606)
at org.apache.tomcat.util.net.JIoEndpoint$Worker (JIoEndpoint.java:489)
at java.lang.Thread (Thread.java:748)
Caused by: Exception: java.io.IOException Message: Failed on local exception: org.apache.hadoop.ipc.RpcException: RPC response exceeds maximum data length; Host Details : local host is: "node01/192.168.65.100"; destination host is: "node01":9870;
Stack trace:
at org.apache.hadoop.net.NetUtils (NetUtils.java:818)
at org.apache.hadoop.ipc.Client (Client.java:1549)
at org.apache.hadoop.ipc.Client (Client.java:1491)
at org.apache.hadoop.ipc.Client (Client.java:1388)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker (ProtobufRpcEngine.java:233)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker (ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy19 (null:-1)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB (ClientNamenodeProtocolTranslatorPB.java:907)
at sun.reflect.NativeMethodAccessorImpl (NativeMethodAccessorImpl.java:-2)
at sun.reflect.NativeMethodAccessorImpl (NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method (Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler (RetryInvocationHandler.java:422)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call (RetryInvocationHandler.java:165)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call (RetryInvocationHandler.java:157)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call (RetryInvocationHandler.java:95)
at org.apache.hadoop.io.retry.RetryInvocationHandler (RetryInvocationHandler.java:359)
at com.sun.proxy.$Proxy20 (null:-1)
at org.apache.hadoop.hdfs.DFSClient (DFSClient.java:1666)
at org.apache.hadoop.hdfs.DistributedFileSystem$29 (DistributedFileSystem.java:1576)
at org.apache.hadoop.hdfs.DistributedFileSystem$29 (DistributedFileSystem.java:1573)
at org.apache.hadoop.fs.FileSystemLinkResolver (FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem (DistributedFileSystem.java:1588)
at org.apache.hadoop.fs.FileSystem (FileSystem.java:1683)
at org.apache.sqoop.connector.hdfs.HdfsToInitializer (HdfsToInitializer.java:58)
at org.apache.sqoop.connector.hdfs.HdfsToInitializer (HdfsToInitializer.java:35)
at org.apache.sqoop.driver.JobManager (JobManager.java:449)
at org.apache.sqoop.driver.JobManager (JobManager.java:373)
at org.apache.sqoop.driver.JobManager (JobManager.java:276)
at org.apache.sqoop.handler.JobRequestHandler (JobRequestHandler.java:380)
at org.apache.sqoop.handler.JobRequestHandler (JobRequestHandler.java:116)
at org.apache.sqoop.server.v1.JobServlet (JobServlet.java:96)
at org.apache.sqoop.server.SqoopProtocolServlet (SqoopProtocolServlet.java:79)
at javax.servlet.http.HttpServlet (HttpServlet.java:646)
at javax.servlet.http.HttpServlet (HttpServlet.java:723)
at org.apache.catalina.core.ApplicationFilterChain (ApplicationFilterChain.java:290)
at org.apache.catalina.core.ApplicationFilterChain (ApplicationFilterChain.java:206)
at org.apache.hadoop.security.authentication.server.AuthenticationFilter (AuthenticationFilter.java:644)
at org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter (DelegationTokenAuthenticationFilter.java:304)
at org.apache.hadoop.security.authentication.server.AuthenticationFilter (AuthenticationFilter.java:592)
at org.apache.catalina.core.ApplicationFilterChain (ApplicationFilterChain.java:235)
at org.apache.catalina.core.ApplicationFilterChain (ApplicationFilterChain.java:206)
at org.apache.catalina.core.StandardWrapperValve (StandardWrapperValve.java:233)
at org.apache.catalina.core.StandardContextValve (StandardContextValve.java:191)
at org.apache.catalina.core.StandardHostValve (StandardHostValve.java:127)
at org.apache.catalina.valves.ErrorReportValve (ErrorReportValve.java:103)
at org.apache.catalina.core.StandardEngineValve (StandardEngineValve.java:109)
at org.apache.catalina.connector.CoyoteAdapter (CoyoteAdapter.java:293)
at org.apache.coyote.http11.Http11Processor (Http11Processor.java:861)
at org.apache.coyote.http11.Http11Protocol$Http11ConnectionHandler (Http11Protocol.java:606)
at org.apache.tomcat.util.net.JIoEndpoint$Worker (JIoEndpoint.java:489)
at java.lang.Thread (Thread.java:748)
Caused by: Exception: java.lang.Throwable Message: RPC response exceeds maximum data length
Stack trace:
at org.apache.hadoop.ipc.Client$IpcStreams (Client.java:1864)
at org.apache.hadoop.ipc.Client$Connection (Client.java:1183)
at org.apache.hadoop.ipc.Client$Connection (Client.java:1079)
哪位大侠帮忙看看：关键应该是这句
Caused by: Exception: java.io.IOException Message: Failed on local exception: org.apache.hadoop.ipc.RpcException: RPC response exceeds maximum data length; Host Details : local host is: "node01/192.168.65.100"; destination host is: "node01":9870;
但不知道问题出在哪里。
（补充分析：从下面的配置看，Output directory 写的是 hdfs://node01:9870/sqoop，而 9870 是 Hadoop 3.x NameNode 的 HTTP Web UI 端口，不是 RPC 端口；客户端用 RPC 协议去连 HTTP 端口，返回的 HTTP 响应会被当成超长的 RPC 报文，从而报 "RPC response exceeds maximum data length"。应改用 NameNode 的 RPC 端口（fs.defaultFS 中配置的端口，通常是 8020 或 9000），例如 hdfs://node01:8020/sqoop，需与 core-site.xml 中 fs.defaultFS 的值一致。）
我的link配置:
From database configuration
Schema name: mysql
Table name: help_topic
Table SQL statement:
Table column names:
Partition column name:
Null value allowed for the partition column:
Boundary query:
Incremental read
Check column:
Last value:
To HDFS configuration
Override null value:
Null value:
Output format:
0 : TEXT_FILE
1 : SEQUENCE_FILE
Choose: 0
Compression format:
0 : NONE
1 : DEFAULT
2 : DEFLATE
3 : GZIP
4 : BZIP2
5 : LZO
6 : LZ4
7 : SNAPPY
8 : CUSTOM
Choose: 0
Custom compression format:
Output directory: hdfs://node01:9870/sqoop
Append mode:
Throttling resources