java后端代码没有问题,启动所有项目后,一个项目consumer调用另一个项目的provider报错(zookeeper问题)

一个项目调用另一个项目提供的接口时报错,下面是具体错误信息:
com.alibaba.dubbo.rpc.RpcException: Forbid consumer 10.0.1.19 access service com.dhcc.mct.service.MctRemake.MctConsumerInfoService from registry zoo1:2181 use dubbo version 2.4.10, Please check registry access list (whitelist/blacklist).
at com.alibaba.dubbo.registry.integration.RegistryDirectory.doList(RegistryDirectory.java:586) ~[dubbo-2.9.10.jar:2.4.10]
at com.alibaba.dubbo.rpc.cluster.directory.AbstractDirectory.list(AbstractDirectory.java:73) ~[dubbo-2.9.10.jar:2.4.10]
at com.alibaba.dubbo.rpc.cluster.support.AbstractClusterInvoker.list(AbstractClusterInvoker.java:259) ~[dubbo-2.9.10.jar:2.4.10]
at com.alibaba.dubbo.rpc.cluster.support.AbstractClusterInvoker.invoke(AbstractClusterInvoker.java:218) ~[dubbo-2.9.10.jar:2.4.10]
at com.alibaba.dubbo.rpc.cluster.support.wrapper.MockClusterInvoker.invoke(MockClusterInvoker.java:72) ~[dubbo-2.9.10.jar:2.4.10]
at com.alibaba.dubbo.rpc.proxy.InvokerInvocationHandler.invoke(InvokerInvocationHandler.java:52) ~[dubbo-2.9.10.jar:2.4.10]
at com.alibaba.dubbo.common.bytecode.proxy34.getComLoginResult(proxy34.java) ~[na:2.4.10]
at com.dhcc.gateway.service.mctRemakeService.MctConsumerService.consumerLoginIds(MctConsumerService.java:210) ~[classes/:na]
at com.dhcc.gateway.controller.mctRemakeController.MctConsumerController.consLogin(MctConsumerController.java:109) ~[classes/:na]
at com.dhcc.gateway.controller.mctRemakeController.MctConsumerController$$FastClassBySpringCGLIB$$4196e5b2.invoke() [spring-core-4.2.0.RELEASE.jar:na]
at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:204) [spring-core-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:717) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:157) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.aspectj.MethodInvocationProceedingJoinPoint.proceed(MethodInvocationProceedingJoinPoint.java:85) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at com.dhcc.gateway.aop.HandleBindDataError.aroundExec(HandleBindDataError.java:62) [classes/:na]
at com.dhcc.gateway.aop.HandleBindDataError.aroundExecClass(HandleBindDataError.java:54) [classes/:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_151]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_151]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_151]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_151]
at org.springframework.aop.aspectj.AbstractAspectJAdvice.invokeAdviceMethodWithGivenArgs(AbstractAspectJAdvice.java:621) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.aspectj.AbstractAspectJAdvice.invokeAdviceMethod(AbstractAspectJAdvice.java:610) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.aspectj.AspectJAroundAdvice.invoke(AspectJAroundAdvice.java:68) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:168) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.interceptor.ExposeInvocationInterceptor.invoke(ExposeInvocationInterceptor.java:92) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:179) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:653) [spring-aop-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at com.dhcc.gateway.controller.mctRemakeController.MctConsumerController$$EnhancerBySpringCGLIB$$37915314.consLogin() [spring-core-4.2.0.RELEASE.jar:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_151]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_151]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_151]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_151]
at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:221) [spring-web-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:137) [spring-web-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:111) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:806) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:729) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:85) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:959) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:893) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:970) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:872) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at javax.servlet.http.HttpServlet.service(HttpServlet.java:648) [servlet-api.jar:na]
at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:846) [spring-webmvc-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at javax.servlet.http.HttpServlet.service(HttpServlet.java:729) [servlet-api.jar:na]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:292) [catalina.jar:8.0.53]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) [catalina.jar:8.0.53]
at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52) [tomcat-websocket.jar:8.0.53]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) [catalina.jar:8.0.53]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) [catalina.jar:8.0.53]
at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:85) [spring-web-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:107) [spring-web-4.2.0.RELEASE.jar:4.2.0.RELEASE]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) [catalina.jar:8.0.53]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) [catalina.jar:8.0.53]
at com.dhcc.gateway.common.ChangeParameterFilter.doFilter(ChangeParameterFilter.java:156) [classes/:na]
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:240) [catalina.jar:8.0.53]
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:207) [catalina.jar:8.0.53]
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:212) [catalina.jar:8.0.53]
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:94) [catalina.jar:8.0.53]
at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:492) [catalina.jar:8.0.53]
at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:141) [catalina.jar:8.0.53]
at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:80) [catalina.jar:8.0.53]
at org.apache.catalina.valves.AbstractAccessLogValve.invoke(AbstractAccessLogValve.java:620) [catalina.jar:8.0.53]
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:88) [catalina.jar:8.0.53]
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:502) [catalina.jar:8.0.53]
at org.apache.coyote.http11.AbstractHttp11Processor.process(AbstractHttp11Processor.java:1152) [tomcat-coyote.jar:8.0.53]
at org.apache.coyote.AbstractProtocol$AbstractConnectionHandler.process(AbstractProtocol.java:684) [tomcat-coyote.jar:8.0.53]
at org.apache.tomcat.util.net.AprEndpoint$SocketWithOptionsProcessor.run(AprEndpoint.java:2464) [tomcat-coyote.jar:8.0.53]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [na:1.8.0_151]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [na:1.8.0_151]
at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61) [tomcat-util.jar:8.0.53]
at java.lang.Thread.run(Thread.java:748) [na:1.8.0_151]

1个回答

尝试把zookeeper中的数据清一下看看

tony_328427685
天涯云海 回复weixin_40255355: 用zkCli.sh连接zookeeper,删除里面的相关目录
一年多之前 回复
weixin_40255355
猿类进化史@ 试了不行
一年多之前 回复
weixin_40255355
猿类进化史@ zookeeper注册中心缓存文件.properties?
一年多之前 回复
Csdn user default icon
上传中...
上传图片
插入图片
抄袭、复制答案,以达到刷声望分或其他目的的行为,在CSDN问答是严格禁止的,一经发现立刻封号。是时候展现真正的技术了!
其他相关推荐
斐波那契堆JAVA实现的问题

最近在做用斐波那契堆改进Prim算法的作业。但是Java代码调试了两个周还是有问题,只能正确输出前3项。 还有几天就要提交作业了,在次跪求大神们帮忙瞧瞧代码。 代码如下: public class FibonacciNode { FibonacciNode child, left, right, parent; int vertex; float element; int degree; Boolean mark; /** Constructor **/ public FibonacciNode(int vertex, float element) { this.right=this; this.left=this; this.parent=null; this.child=null; this.vertex=vertex; this.element=element; this.degree=0; this.mark=false; } } public class FibonacciHeap { FibonacciNode root; int count; public FibonacciHeap(){ root=null; count=0; } //Return the number of nodes of the current heap public int size(){ return count; } //Judge if the heap is empty public boolean isEmpty(){ return root==null; } //Clear the whole heap public void clear(){ root=null; count=0; } //Insert a node to the heap. public void insert(int vertex, Float element){ FibonacciNode node=new FibonacciNode(vertex, element); if(root==null) root=node; else{ addNode(node, root); if(root.element>node.element){ root=node; } } count++; } //Add b to the tail of a //Notify that a and b are both the heads of a double-linked list private void catList(FibonacciNode a, FibonacciNode b){ FibonacciNode tmp= a.right; a.right =b.right; b.right.left= a; b.right= tmp; tmp.left= b; } //Get the minimum node of the heap and remove it from the heap public FibonacciNode extractMin(){ if(root==null){ return null; } if(root.child!=null){ FibonacciNode m = root; FibonacciNode start=root.child; for(int i=0; i<m.degree; i++){ if(start!=null){ start.parent=null; addNode(start, root); start=start.right; } } } //remove root from the root list of heap FibonacciNode min=root; min.left.right=min.right; min.right.left=min.left; //if min.right==min, then the root of the heap has no child if(min.right==min){ this.root=null; } else{ root=min.right; consolidate(); } //decrease the number of the nodes count--; return min; } /* // 将min每一个儿子(儿子和儿子的兄弟)都添加到"斐波那契堆的根链表"中 while (m.child != null){ FibonacciNode child=m.child; removeNode(child); 
if(child.right==child) m.child=null; else m.child=child.right; addNode(child, min); child.parent=null; } */ /* if(min.child!=null){ //set all the min's child's parent as null System.out.println("2:22222"); FibonacciNode startChild=min.child; startChild.parent=null; for(FibonacciNode x=startChild.right; x!=startChild; x=x.right){ x.parent=null; System.out.println("3:22222"); } //merge the children to the root list catList(root, startChild); } */ //unify two node if they have the same degree private void consolidate() { FibonacciNode[] cons=new FibonacciNode[this.count]; for(int i=0; i<this.count; i++) cons[i] = null; while (root!=null) { FibonacciNode x =root; if(x==x.right) root=null; else{ removeNode(x); root=x.right; } int d=x.degree; while(cons[d]!=null) { FibonacciNode y=cons[d]; if (x.element>y.element) { FibonacciNode tmp=x; x=y; y=tmp; } link(y, x); cons[d]=null; d++; } cons[d] = x; } root = null; for(int i=0; i<cons.length; i++){ if(cons[i] != null) { if(root == null) root = cons[i]; else{ addNode(cons[i], root); if ((cons[i]).element < root.element) root = cons[i]; } } } } //remove node1 from the root list and make it as node2's child private void link(FibonacciNode node1, FibonacciNode node2) { // remove node1 from the root list node1.left.right = node1.right; node1.right.left = node1.left; // set node as root's child if (node2.child == null) node2.child = node1; else{ node1.right=node2.child.right; node2.child.right=node1; node1.left=node2.child; node1.right.left=node1; } node1.parent = node2; node2.degree++; node1.mark = false; } //add node to the list rooted at root private void addNode(FibonacciNode node, FibonacciNode root) { node.left=root; node.right=root.right; root.right=node; node.right.left=node; } public void decreaseKey(FibonacciNode node, int key) { if (key > node.element) { System.out.println("decrease failed: the new key is no smaller than current key"); return; } if (root==null||node==null) return; node.element=key; FibonacciNode parent = 
node.parent; //if parent is null or node's element is no smaller than it's parent's, nothing is needed to be done if (parent!=null&&(node.element<parent.element)) { //remove node and add it to the root list cut(node, parent); cascadingCut(parent); } // update the root node if (node.element<root.element) root=node; } private void removeNode(FibonacciNode node) { node.left.right = node.right; node.right.left = node.left; } private void renewDegree(FibonacciNode parent){ parent.degree -= 1; if(parent. parent != null) renewDegree(parent.parent); } //remove node from it's parent and add it to the root list private void cut(FibonacciNode node, FibonacciNode parent) { removeNode(node); renewDegree(parent); //node has no sibling if (node==node.right) parent.child=null; else parent.child=node.right; node.parent=null; node.left=node.right=node; node.mark=false; //add to the root list of heap addNode(node, root); } //recurse cut the parent' parent, until reach the root list private void cascadingCut(FibonacciNode node) { FibonacciNode parent = node.parent; if (parent!=null) { if(node.mark==false) node.mark=true; else{ cut(node, parent); cascadingCut(parent); } } } //Add heap other to the current heap public void union(FibonacciHeap other) { if (this.root==null) //this is empty, just return the othe this.root=other.root; else if((other.root)!=null) {// both this and other are not empty catList(this.root, other.root); if(this.root.element>other.root.element) this.root=other.root; } this.count=this.count+other.count; other=null; return; } } 测试程序: public class FibonacciHeapTest { public static void main(String[] args){ FibonacciHeap heap=new FibonacciHeap(); for(int i=10; i>0; i--){ heap.insert(9-i, (float)i); } for(int i=0; i<10; i++){ System.out.println(heap.extractMin().element); } } } 运行结果: 1.0 2.0 3.0 4.0 Exception in thread "main" java.lang.NullPointerException at MST.FibonacciHeapTest.main(FibonacciHeapTest.java:10)

小白 求助 关于 OpenCV3.10 中 Qt New Functions 的使用问题

没接触过qt,在编写程序时想做个界面,看见opencv3.10的帮助文档中 提及 Qt New Functions 中的createButton 函数 ,想做个按钮。。结果没找到 createButton ,显示的只有 cvCreateButton。。。用了却显示如下错误。。 严重性 代码 说明 项目 文件 行 禁止显示状态 错误 LNK2019 无法解析的外部符号 cvCreateButton,该符号在函数 main 中被引用 opencv_3.10_test e:\visual studio 2015\Project\opencv_3.10_test\opencv_3.10_test\main.obj 1 严重性 代码 说明 项目 文件 行 禁止显示状态 错误 LNK1120 1 个无法解析的外部命令 opencv_3.10_test e:\visual studio 2015\Project\opencv_3.10_test\x64\Debug\opencv_3.10_test.exe 1 求大神解释。。是这个功能只能在 Qt 项目中使用吗?还是别的问题?。。但我看帮助文档中是直接在main函数中使用的。。

Java小程序报错,怎样解决?

import java.util.Scanner; public class ATM { public int c; Scanner in=new Scanner(System.in); Account aa[]=new Account[10]; int s[]={101,102,103,104,105,106,107,108,109,110}; public ATM() { for(int i=0;i<=9;i++) { aa[i]=new Account(); aa[i].setbal(100); aa[i].setac(s[i]); } } public void login() { int s=0; while(true) { System.out.println("请输入你的账号:"); c=in.nextInt(); for(int i=0;i<=9;i++) { if(c==this.aa[i].getac()) { s++; return ; } } if(s==0) { System.out.println("账号错误!"); } } } public void menu(int b) { System.out.println("1.查询余额\r\n2.取款\r\n3.存款\r\n4.退出"); System.out.println("请输入你的选择:"); int a=in.nextInt(); System.out.println("欢迎!"); switch(a) { case 1:System.out.println("你的账户余额为:"+this.aa[b].getbal());break; case 2:this.aa[b].withdraw();break; case 3:this.aa[b].deposit();break; case 4:return; } } public static void main(String args[]) { ATM cc=new ATM(); while(true){ cc.login(); cc.menu(cc.c); } } } Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: 105 at exp4.ATM.menu(ATM.java:51) at exp4.ATM.main(ATM.java:63)

JAVA 反射构造函数带泛型参数的类

JAVA 反射构造函数带泛型参数的类 比如说: ``` public class User<T extends Person> { private T rootType; public User(T rootType){ this.rootType = rootType; } @Override public String toString() { return "User [rootType=" + rootType + "]"; } } public class Work extends Person { @Override public String toString() { return "Work [getClass()=" + getClass() + ", hashCode()=" + hashCode() + ", toString()=" + super.toString() + "]"; } } public class Person { @Override public String toString() { return "Person []"; } } ``` 我想要构造一个User(work), 如何构造? ``` public static <T extends Person> T getInstance() { String className = "com.aaa.reflection.utils.User"; Class clazz; try { clazz = Class.forName(className); Constructor<?>[] cons = clazz.getConstructors(); Constructor<?> con = cons[0]; Object initargs = Work.class; T obj = (T) con.newInstance(initargs); System.out.println(obj.toString()); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } return null; } ```

MALTAB中基于动力学模型的主动转向控制代码如何改正?

大家好,菜鸟最近在看龚建伟教授的《无人驾驶车辆模型检测》,可是看到第五章基于动力学模型的主动转向控制那里的源代码最后的有效集法'active set'从quadprog里删除了,请问应该怎么改? ``` %% 开始求解过程 options = optimset('Algorithm','active-set'); x_start=zeros(Nc+1,1);%加入一个起始点 [X,fval,exitflag]=quadprog(H,f,A_cons,b_cons,[],[],lb,ub,x_start,options); fprintf('exitflag=%d\n',exitflag); fprintf('H=%4.2f\n',H(1,1)); fprintf('f=%4.2f\n',f(1,1)); ```

Android反射问题, InvocationTargetException

通过反射调用私有无参函数,函数内部: ``` private int TargetFunction() { Log.d("DEBUG","进入了宿主函数TargetFunction"); Dialog alertDialog = new AlertDialog.Builder(MainActivity.this). setTitle("确定删除?"). setMessage("您确定删除该条信息吗?"). setIcon(R.mipmap.ic_launcher). setPositiveButton("确定", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // TODO Auto-generated method stub myFunction(); } }). setNegativeButton("取消", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // TODO Auto-generated method stub } }). setNeutralButton("查看详情", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // TODO Auto-generated method stub } }). create(); alertDialog.show(); return 9991; } ``` 当调用之后可以成功进入该函数内,但随即抛错,log信息如下: ``` 进入了宿主函数TargetFunction java.lang.reflect.InvocationTargetException at java.lang.reflect.Method.invoke(Native Method) at java.lang.reflect.Method.invoke(Method.java:372) at tools.unicorn.dexinject.MyApplication.abcd(MyApplication.java:55) at tools.unicorn.dexinject.MyApplication.dexInject(MyApplication.java:21) at java.lang.reflect.Method.invoke(Native Method) at java.lang.reflect.Method.invoke(Method.java:372) at com.aninterface.hook.hook_target.MainActivity.loadUninstallApk(MainActivity.java:123) at com.aninterface.hook.hook_target.MainActivity.access$100(MainActivity.java:22) at com.aninterface.hook.hook_target.MainActivity$2.onClick(MainActivity.java:39) at android.view.View.performClick(View.java:4780) at android.view.View$PerformClick.run(View.java:19866) at android.os.Handler.handleCallback(Handler.java:739) at android.os.Handler.dispatchMessage(Handler.java:95) at android.os.Looper.loop(Looper.java:135) at android.app.ActivityThread.main(ActivityThread.java:5254) at java.lang.reflect.Method.invoke(Native Method) at java.lang.reflect.Method.invoke(Method.java:372) at 
com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903) at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698) Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'android.content.pm.ApplicationInfo android.content.Context.getApplicationInfo()' on a null object reference at android.content.ContextWrapper.getApplicationInfo(ContextWrapper.java:149) at android.view.ContextThemeWrapper.getTheme(ContextThemeWrapper.java:99) at android.app.AlertDialog.resolveDialogTheme(AlertDialog.java:154) at android.app.AlertDialog$Builder.<init>(AlertDialog.java:379) at com.aninterface.hook.hook_target.MainActivity.TargetFunction(MainActivity.java:46) ... 19 more ``` 不知有谁遇到过这样的问题,小弟实在是找不到原因了

C++编程问题请大神赐教谢谢

代码如下,为何会有3个错误 // ConsoleApplication68.cpp : 定义控制台应用程序的入口点。 // #include "stdafx.h" #include<iostream> using namespace std; const double PI = 3.14; class Shape { public: virtual void getArea() = 0; virtual void getPerim() = 0; }; class Rectangle :public Shape { private: double width; double length; public: Rectangle():width(0),length(0){} Rectangle(double width,double length):width(width),length(length){} virtual void getArea() { cout << "Area:" << width*length << endl; } virtual void getPerim() { cout << "Perim:" << 2 * (width*length) << endl; } ~Rectangle(); }; class Circle: public Shape{ private: double radius; public: Circle():radius(0){} Circle(double radius):radius(radius){} virtual void getArea() { cout << "Area:" << PI*radius*radius; } virtual void getPerim() { cout << "Perim:" << 2 * PI*radius; } ~Circle(); }; int main() { Rectangle a1(2, 3); Circle c1(2); a1.getArea(); a1.getPerim(); c1.getArea(); c1.getPerim(); return 0; }

hadoop集群下 spark 启动报错

``` Setting default log level to "WARN". To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel). 17/09/29 09:24:37 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable java.lang.IllegalArgumentException: Error while instantiating 'org.apache.spark.sql.hive.HiveSessionStateBuilder': at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1053) at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) at scala.Option.getOrElse(Option.scala:121) at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129) at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126) at org.apache.spark.sql.SparkSession$Builder$$anonfun$getOrCreate$5.apply(SparkSession.scala:938) at org.apache.spark.sql.SparkSession$Builder$$anonfun$getOrCreate$5.apply(SparkSession.scala:938) at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99) at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99) at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230) at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40) at scala.collection.mutable.HashMap.foreach(HashMap.scala:99) at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:938) at org.apache.spark.repl.Main$.createSparkSession(Main.scala:97) ... 
47 elided Caused by: org.apache.spark.sql.AnalysisException: java.lang.RuntimeException: org.apache.hadoop.fs.ParentNotDirectoryException: /tmp (is not a directory) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkIsDirectory(FSPermissionChecker.java:530) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkSimpleTraverse(FSPermissionChecker.java:522) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:497) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1603) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1621) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.resolvePath(FSDirectory.java:542) at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:51) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:2970) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1078) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:637) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:447) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:989) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:845) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:788) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1807) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2455) ; at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:106) at 
org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193) at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105) at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93) at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39) at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54) at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52) at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35) at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289) at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050) ... 61 more Caused by: java.lang.RuntimeException: org.apache.hadoop.fs.ParentNotDirectoryException: /tmp (is not a directory) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkIsDirectory(FSPermissionChecker.java:530) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkSimpleTraverse(FSPermissionChecker.java:522) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:497) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1603) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1621) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.resolvePath(FSDirectory.java:542) at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:51) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:2970) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1078) at 
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:637) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:447) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:989) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:845) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:788) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1807) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2455) at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522) at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191) at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264) at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362) at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266) at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66) at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65) at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194) at 
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) ... 70 more Caused by: org.apache.hadoop.fs.ParentNotDirectoryException: /tmp (is not a directory) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkIsDirectory(FSPermissionChecker.java:530) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkSimpleTraverse(FSPermissionChecker.java:522) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:497) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1603) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1621) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.resolvePath(FSDirectory.java:542) at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:51) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:2970) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1078) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:637) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:447) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:989) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:845) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:788) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1807) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2455) at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106) at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73) at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3002) at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2970) at org.apache.hadoop.hdfs.DistributedFileSystem$21.doCall(DistributedFileSystem.java:1047) at org.apache.hadoop.hdfs.DistributedFileSystem$21.doCall(DistributedFileSystem.java:1043) at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1061) at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1036) at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3679) at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:597) at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554) at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508) ... 
84 more Caused by: org.apache.hadoop.ipc.RemoteException: /tmp (is not a directory) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkIsDirectory(FSPermissionChecker.java:530) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkSimpleTraverse(FSPermissionChecker.java:522) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:497) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1603) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1621) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.resolvePath(FSDirectory.java:542) at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:51) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:2970) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1078) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:637) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:447) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:989) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:845) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:788) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1807) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2455) at org.apache.hadoop.ipc.Client.call(Client.java:1475) at org.apache.hadoop.ipc.Client.call(Client.java:1412) at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229) at 
com.sun.proxy.$Proxy22.mkdirs(Unknown Source) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:558) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102) at com.sun.proxy.$Proxy23.mkdirs(Unknown Source) at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3000) ... 94 more <console>:14: error: not found: value spark import spark.implicits._ ^ <console>:14: error: not found: value spark import spark.sql ^ Welcome to ____ __ / __/__ ___ _____/ /__ _\ \/ _ \/ _ `/ __/ '_/ /___/ .__/\_,_/_/ /_/\_\ version 2.2.0 /_/ Using Scala version 2.11.8 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_144) Type in expressions to have them evaluated. Type :help for more information. scala> ```

IntelliJ IDEA event log出现空指针异常

![图片说明](https://img-ask.csdn.net/upload/201604/16/1460812995_272013.png) IntelliJ IDEA event log总是不间断的出现 NullPointerException: null 如图所示,谢谢解答

请教一个hibernate和spring整合时的问题

在整合的过程中,发现一个问题,session并没有关闭。以下是说明: [code="java"]protected Session getSession() { return this.sessionFactory.getCurrentSession(); }[/code] 这是获取session的方法 [code="java"](T)this.getSession().get(this.clazz, id)[/code] 这是获取对象的方法 事务在配置文件中进行配置: [code="java"] <bean id="transactionManager" class="org.springframework.orm.hibernate4.HibernateTransactionManager"> <property name="sessionFactory" ref="sessionFactory" /> </bean>[/code] 正常的情况来说,获取对象后,session就应该关闭了,可是在后面的测试中发现session并没有关闭,仍然可以查询出关联对象。网上说这种session不用手动关闭,请问它何时关闭

如何把字符串转换成用字符串显示的字符类型 求帮助!

private String[] colName = null; // 列名数组 private String[] colType = null; //存放数据类型 private String[] colValue = null; // 列值 这里有三个数组是从数据库中读取这些数据 然后将这些数据转换成一个对象 老师留的仿hibernate的根据主键获取对象的方法 我知道要用类反射做 可是怎么用就不太明白了 我写了这么个代码 Class c = Class.forName("java.lang.Double"); Object o = c.newInstance(); Object result = c.getDeclaredMethod("parse", String.class).invoke(Test.class, "4455"); System.out.println(result); 是通过反射获取Double的类 再调用parse方法 传的参数是4455 可是执行时报这个错误 java.lang.InstantiationException: java.lang.Double 我网上查了说Double这个类构造的时候需要一个参数 而newInstance(); 没有办法传参数 然后有了下面代码 Class c = Class.forName("java.lang.Double"); Constructor cons = c.getConstructor(new Class[]{String.class}); Object o = cons.newInstance("parse"); 这个样子虽然类可以传参数了 可是就没有办法调方法了 Constructor没有getMethod方法。。 感谢你看了这么多 有解决办法的话发一个呗 万分感谢!

用于MySQL的编程语言:Java还是C ++? [关闭]

<div class="post-text" itemprop="text"> <p>I don't know much of either languages, but what would be better for writing applications that deals with the following. Right now, I use PHP, but I'm looking to see what would be the next ideal language to dig into, if the needs arise.</p> <ul> <li>MySQL</li> <li>off-line data processing of CSV, XML, etc. files</li> <li>web applications (dynamic websites)</li> </ul> <p>I understand that this could quite possibly be subjective.. so if you could just pitch in some pros and cons, that would be superb. TIA!</p> </div>

dubbo服务调用报错,哪位大神知道这是什么错误

Caused by: com.alibaba.com.caucho.hessian.io.HessianProtocolException: 'org.springframework.beans.factory.NoUniqueBeanDefinitionException' could not be instantiated at com.alibaba.com.caucho.hessian.io.JavaDeserializer.instantiate(JavaDeserializer.java:275) ~[dubbo-2.8.4.jar:2.8.4] at com.alibaba.com.caucho.hessian.io.JavaDeserializer.readObject(JavaDeserializer.java:155) ~[dubbo-2.8.4.jar:2.8.4] at com.alibaba.com.caucho.hessian.io.Hessian2Input.readObjectInstance(Hessian2Input.java:2067) ~[dubbo-2.8.4.jar:2.8.4] at com.alibaba.com.caucho.hessian.io.Hessian2Input.readObject(Hessian2Input.java:1592) ~[dubbo-2.8.4.jar:2.8.4] at com.alibaba.com.caucho.hessian.io.Hessian2Input.readObject(Hessian2Input.java:1576) ~[dubbo-2.8.4.jar:2.8.4] at com.alibaba.com.caucho.hessian.io.JavaDeserializer$ObjectFieldDeserializer.deserialize(JavaDeserializer.java:396) ~[dubbo-2.8.4.jar:2.8.4] ... 27 common frames omitted Caused by: java.lang.reflect.InvocationTargetException: null at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_191] at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_191] at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_191] at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[na:1.8.0_191] at com.alibaba.com.caucho.hessian.io.JavaDeserializer.instantiate(JavaDeserializer.java:271) ~[dubbo-2.8.4.jar:2.8.4] ... 32 common frames omitted Caused by: java.lang.NullPointerException: null at java.util.Objects.requireNonNull(Objects.java:203) ~[na:1.8.0_191] at java.util.Arrays$ArrayList.<init>(Arrays.java:3813) ~[na:1.8.0_191] at java.util.Arrays.asList(Arrays.java:3800) ~[na:1.8.0_191] at org.springframework.beans.factory.NoUniqueBeanDefinitionException.<init>(NoUniqueBeanDefinitionException.java:65) ~[spring-beans-4.2.5.RELEASE.jar:4.2.5.RELEASE] ... 37 common frames omitted ``` ```

protected final Class<?> findLoadedClass(String name)这个函数怎么用呢??

我想知道一个类是否已经被加载,在网上看见这个方法,但是这个方法声明为 ``` protected final ``` 我在代码中获得的ClassLoader 是子加载器,子加载器中没有这个方法,我该怎么用到这个方法呢?? 代码如下: ``` public class reflect_private { private String color; private void drive(){ System.out.println("哈哈哈哈哈的颜色为"+color); } } ``` ``` import java.lang.reflect.Method; public class reflect_test { public static void main(String[] args) throws ClassNotFoundException, IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException, NoSuchFieldException { ClassLoader cons = Thread.currentThread().getContextClassLoader(); // Class clazz = Class.forName("reflect_private");//cons.loadClass("reflect_private"); Class clazz = cons.loadClass("reflect_private"); reflect_private rp = (reflect_private)clazz.getConstructor().newInstance(); Field colorfed = clazz.getDeclaredField("color"); colorfed.setAccessible(true); colorfed.set(rp,"red"); Method driveMd = clazz.getDeclaredMethod("drive",(Class[])null); driveMd.setAccessible(true); driveMd.invoke(rp,null); Method flc = cons.getClass().getDeclaredMethod("findLoadedClass", String.class); flc.setAccessible(true); flc.invoke(cons,"reflect_private"); } } ``` 第二段代码是要运行的代码,报错findLoadedClass方法找不到

java,麻烦详细告诉为什么结果是'B and A', 而不是‘B and B’ ?????????

[code="java"]public class Cons { public static void main(String[] args) { A a1 = new A(); A a2 = new B(); B b = new B(); C c = new C(); D d = new D(); System.out.println(a2.show(b)); // ???????????? } } class A { public String show(D obj) { return ("A and D"); } public String show(A obj) { return ("A and A"); } public String show(C obj) { return ("A and C"); } } class B extends A { public String show(B obj) { return ("B and B"); } public String show(A obj) { return ("B and A"); } public String show(C obj) { return ("B and C"); } } class C extends B { } class D extends B { } [/code]

一个关于Hibernate的奇葩问题 HibernateSystemException

这是在公司的项目中遇到的问题,很奇怪。它不是每次都出现,有的时候出现,自己尝试解决,可实践了很久都不知道异常的原因。项目使用的是Hibernate+Spring,是客户端程序。具体异常详见: 2015-05-18 19:37:21,288-[TS] ERROR http-bio-8080-exec-4 org.hibernate.property.BasicPropertyAccessor - IllegalArgumentException in class: com.erp.pojo.OaWorkFlow, getter method of property: id 2015-05-18 19:37:21,290-[TS] WARN http-bio-8080-exec-4 org.springframework.remoting.support.RemoteInvocationTraceInterceptor - Processing of HttpInvokerServiceExporter remote call resulted in fatal exception: com.erp.service.RiskReviewService.findCurrentRiskReview org.springframework.orm.hibernate3.HibernateSystemException: IllegalArgumentException occurred calling getter of com.erp.pojo.OaWorkFlow.id; nested exception is org.hibernate.PropertyAccessException: IllegalArgumentException occurred calling getter of com.erp.pojo.OaWorkFlow.id 根据异常来看,说是OaWorkFlow类的id没有getter方法,可实际上是有的,这就是我为什么说奇葩的问题。下面是OaWorkFlow代码片段: ![图片说明](https://img-ask.csdn.net/upload/201505/18/1431949084_668656.png) 还请大家驻足帮忙看一看,以前是否有遇到这种问题,谢谢!

Non-resolvable parent POM for ***

Non-resolvable parent POM for com.test.XXX:XXX: test.XXX:XXX-integration:1.0.0: Failure to find com.test.XXX:XXX-parent:pom:1.0.0 in http://mvn.dev.mayibank.net/artifactory/repo was cached in the local repository, resolution will not be reattempted until the update interval of central has elapsed or updates are forced and 'parent.relativePath' points at 问题描述: 1、parent pom中部分dependency有改变,从springboot版本进行了升级 但是镜像等的都没有改动,本地仓库当前项目删掉后,打包就会报这个错误 但是如果clone一个没有升级之前的项目进行打包没有问题,打包后仓库中有parent的model,打包就没有问题,没有以前老的parent的module就会报错

导入*.cpp文件后,编译出现错误error LNK2005

seg.cpp的代码功能是预处理图片/检测图片 现在Cpig.cpp中加入include < seg.cpp > 出现如下错误: ``` 1>------ 已启动全部重新生成: 项目: AnimalsFatMeasure, 配置: Release x64 ------ 1>stdafx.cpp 1>AnimalsFatMeasure.cpp 1>AnimalsFatMeasureDlg.cpp 1>Cpig.cpp 1>INFOR.cpp 1>seg.cpp 1>seg.obj : error LNK2005: "class cv::Mat g_srcImage" (?g_srcImage@@3VMat@cv@@A) 已经在 Cpig.obj 中定义 1>seg.obj : error LNK2005: "class cv::Mat g_grayImage" (?g_grayImage@@3VMat@cv@@A) 已经在 Cpig.obj 中定义 1>seg.obj : error LNK2005: "class cv::Mat out_therImage" (?out_therImage@@3VMat@cv@@A) 已经在 Cpig.obj 中定义 1>seg.obj : error LNK2005: "class std::basic_string<char,struct std::char_traits<char>,class std::allocator<char> > saveImage" (?saveImage@@3V?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@std@@A) 已经在 Cpig.obj 中定义 1>seg.obj : error LNK2005: "class std::basic_string<char,struct std::char_traits<char>,class std::allocator<char> > file_save" (?file_save@@3V?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@std@@A) 已经在 Cpig.obj 中定义 1>Cpig.obj : error LNK2005: "float __cdecl Zfhanliang(class std::basic_string<char,struct std::char_traits<char>,class std::allocator<char> >)" (?Zfhanliang@@YAMV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@std@@@Z) 已经在 Cpig.obj 中定义 1>Cpig.obj : error LNK2005: "bool __cdecl Jh12(int,int)" (?Jh12@@YA_NHH@Z) 已经在 Cpig.obj 中定义 ``` 具体代码如下 seg.cpp ``` #include"stdafx.h" #include <iostream> #include<opencv2\opencv.hpp> #include<opencv2/imgproc/imgproc.hpp> #include<vector> #include<math.h> #include<algorithm> #include<time.h> #include<io.h> #include<stdio.h> #include<string> //#include<squares.cpp> using namespace cv; using namespace std; Mat g_srcImage, g_grayImage, out_therImage; string saveImage = "preidentify/save"; string file_save = "middleidentify/"; ``` Cpig.cpp代码如下 ``` #include "Resource.h" #include "stdafx.h" #include "AnimalsFatMeasure.h" #include "Cpig.h" #include "afxdialogex.h" #include "INFOR.h" #include <opencv2/opencv.hpp> #include "seg.cpp" ```

Mysql查询日期字段下不同的值,并将这些不同日期升序排列,排序结果不正确,请问如何解决呢?

我从系统中导出了仓库每个交货期的收货记录,一份excel是一个收货日期“CONS DATE”,并将这些表导入了navicat,现在想查询出有多少个不同的"CONS DATE”,并将该字段下的不同值按照升序/降序排列。我的查询代码如下: “SELECT DISTINCT`CONS DATE` FROM `po-booking report` ORDER BY `CONS DATE` ASC;” 执行结果中的日期确实都是不同的,但是排序结果不准确,![图片说明](https://img-ask.csdn.net/upload/202004/14/1586846325_889131.png)![图片说明](https://img-ask.csdn.net/upload/202004/14/1586846375_985778.png) 请问应该如何解决呢?

警告是:The constructor Integer(int) is deprecated since version 9,我想问问这个该怎么改

``` import java.util.Comparator; import java.util.Iterator; import java.util.TreeSet; public class TreeSetDemo2 { public static void main(String[] args) { TreeSet<String> ts=new TreeSet(); ts.add("a"); ts.add("sfsd"); ts.add("fgdfd"); ts.add("aaa"); ts.add("bcd"); Iterator<String> it=ts.iterator(); while(it.hasNext()) { System.out.println(it.next()); } } } class StringLengthCompare implements Comparator<Object> { public int compare(Object o1,Object o2) { String s1=(String) o1; String s2=(String) o2; int num=new Integer(s1.length()).compareTo(new Integer(s2.length())); if(num==0) return s1.compareTo(s2); return num; } } ``` 出现在int num=new Integer(s1.length()).compareTo(new Integer(s2.length()));这一行

在中国程序员是青春饭吗?

今年,我也32了 ,为了不给大家误导,咨询了猎头、圈内好友,以及年过35岁的几位老程序员……舍了老脸去揭人家伤疤……希望能给大家以帮助,记得帮我点赞哦。 目录: 你以为的人生 一次又一次的伤害 猎头界的真相 如何应对互联网行业的「中年危机」 一、你以为的人生 刚入行时,拿着傲人的工资,想着好好干,以为我们的人生是这样的: 等真到了那一天,你会发现,你的人生很可能是这样的: ...

技术大佬:我去,你写的 switch 语句也太老土了吧

昨天早上通过远程的方式 review 了两名新来同事的代码,大部分代码都写得很漂亮,严谨的同时注释也很到位,这令我非常满意。但当我看到他们当中有一个人写的 switch 语句时,还是忍不住破口大骂:“我擦,小王,你丫写的 switch 语句也太老土了吧!” 来看看小王写的代码吧,看完不要骂我装逼啊。 private static String createPlayer(PlayerTypes p...

华为初面+综合面试(Java技术面)附上面试题

华为面试整体流程大致分为笔试,性格测试,面试,综合面试,回学校等结果。笔试来说,华为的难度较中等,选择题难度和网易腾讯差不多。最后的代码题,相比下来就简单很多,一共3道题目,前2题很容易就AC,题目已经记不太清楚,不过难度确实不大。最后一题最后提交的代码过了75%的样例,一直没有发现剩下的25%可能存在什么坑。 笔试部分太久远,我就不怎么回忆了。直接将面试。 面试 如果说腾讯的面试是挥金如土...

和黑客斗争的 6 天!

互联网公司工作,很难避免不和黑客们打交道,我呆过的两家互联网公司,几乎每月每天每分钟都有黑客在公司网站上扫描。有的是寻找 Sql 注入的缺口,有的是寻找线上服务器可能存在的漏洞,大部分都...

讲一个程序员如何副业月赚三万的真实故事

loonggg读完需要3分钟速读仅需 1 分钟大家好,我是你们的校长。我之前讲过,这年头,只要肯动脑,肯行动,程序员凭借自己的技术,赚钱的方式还是有很多种的。仅仅靠在公司出卖自己的劳动时...

win10暴力查看wifi密码

刚才邻居打了个电话说:喂小灰,你家wifi的密码是多少,我怎么连不上了。 我。。。 我也忘了哎,就找到了一个好办法,分享给大家: 第一种情况:已经连接上的wifi,怎么知道密码? 打开:控制面板\网络和 Internet\网络连接 然后右击wifi连接的无线网卡,选择状态 然后像下图一样: 第二种情况:前提是我不知道啊,但是我以前知道密码。 此时可以利用dos命令了 1、利用netsh wlan...

上班一个月,后悔当初着急入职的选择了

最近有个老铁,告诉我说,上班一个月,后悔当初着急入职现在公司了。他之前在美图做手机研发,今年美图那边今年也有一波组织优化调整,他是其中一个,在协商离职后,当时捉急找工作上班,因为有房贷供着,不能没有收入来源。所以匆忙选了一家公司,实际上是一个大型外包公司,主要派遣给其他手机厂商做外包项目。**当时承诺待遇还不错,所以就立马入职去上班了。但是后面入职后,发现薪酬待遇这块并不是HR所说那样,那个HR自...

总结了 150 余个神奇网站,你不来瞅瞅吗?

原博客再更新,可能就没了,之后将持续更新本篇博客。

副业收入是我做程序媛的3倍,工作外的B面人生是怎样的?

提到“程序员”,多数人脑海里首先想到的大约是:为人木讷、薪水超高、工作枯燥…… 然而,当离开工作岗位,撕去层层标签,脱下“程序员”这身外套,有的人生动又有趣,马上展现出了完全不同的A/B面人生! 不论是简单的爱好,还是正经的副业,他们都干得同样出色。偶尔,还能和程序员的特质结合,产生奇妙的“化学反应”。 @Charlotte:平日素颜示人,周末美妆博主 大家都以为程序媛也个个不修边幅,但我们也许...

如果你是老板,你会不会踢了这样的员工?

有个好朋友ZS,是技术总监,昨天问我:“有一个老下属,跟了我很多年,做事勤勤恳恳,主动性也很好。但随着公司的发展,他的进步速度,跟不上团队的步伐了,有点...

我入职阿里后,才知道原来简历这么写

私下里,有不少读者问我:“二哥,如何才能写出一份专业的技术简历呢?我总感觉自己写的简历太烂了,所以投了无数份,都石沉大海了。”说实话,我自己好多年没有写过简历了,但我认识的一个同行,他在阿里,给我说了一些他当年写简历的方法论,我感觉太牛逼了,实在是忍不住,就分享了出来,希望能够帮助到你。 01、简历的本质 作为简历的撰写者,你必须要搞清楚一点,简历的本质是什么,它就是为了来销售你的价值主张的。往深...

带了6个月的徒弟当了面试官,而身为高级工程师的我天天修Bug......

即将毕业的应届毕业生一枚,现在只拿到了两家offer,但最近听到一些消息,其中一个offer,我这个组据说客户很少,很有可能整组被裁掉。 想问大家: 如果我刚入职这个组就被裁了怎么办呢? 大家都是什么时候知道自己要被裁了的? 面试软技能指导: BQ/Project/Resume 试听内容: 除了刷题,还有哪些技能是拿到offer不可或缺的要素 如何提升面试软实力:简历, 行为面试,沟通能...

!大部分程序员只会写3年代码

如果世界上都是这种不思进取的软件公司,那别说大部分程序员只会写 3 年代码,恐怕就没有程序员这种职业。

离职半年了,老东家又发 offer,回不回?

有小伙伴问松哥这个问题,他在上海某公司,在离职了几个月后,前公司的领导联系到他,希望他能够返聘回去,他很纠结要不要回去? 俗话说好马不吃回头草,但是这个小伙伴既然感到纠结了,我觉得至少说明了两个问题:1.曾经的公司还不错;2.现在的日子也不是很如意。否则应该就不会纠结了。 老实说,松哥之前也有过类似的经历,今天就来和小伙伴们聊聊回头草到底吃不吃。 首先一个基本观点,就是离职了也没必要和老东家弄的苦...

HTTP与HTTPS的区别

面试官问HTTP与HTTPS的区别,我这样回答让他竖起大拇指!

程序员毕业去大公司好还是小公司好?

虽然大公司并不是人人都能进,但我仍建议还未毕业的同学,尽力地通过校招向大公司挤,但凡挤进去,你这一生会容易很多。 大公司哪里好?没能进大公司怎么办?答案都在这里了,记得帮我点赞哦。 目录: 技术氛围 内部晋升与跳槽 啥也没学会,公司倒闭了? 不同的人脉圈,注定会有不同的结果 没能去大厂怎么办? 一、技术氛围 纵观整个程序员技术领域,哪个在行业有所名气的大牛,不是在大厂? 而且众所...

程序员为什么千万不要瞎努力?

本文作者用对比非常鲜明的两个开发团队的故事,讲解了敏捷开发之道 —— 如果你的团队缺乏统一标准的环境,那么即使勤劳努力,不仅会极其耗时而且成果甚微,使用...

为什么程序员做外包会被瞧不起?

二哥,有个事想询问下您的意见,您觉得应届生值得去外包吗?公司虽然挺大的,中xx,但待遇感觉挺低,马上要报到,挺纠结的。

当HR压你价,说你只值7K,你该怎么回答?

当HR压你价,说你只值7K时,你可以流畅地回答,记住,是流畅,不能犹豫。 礼貌地说:“7K是吗?了解了。嗯~其实我对贵司的面试官印象很好。只不过,现在我的手头上已经有一份11K的offer。来面试,主要也是自己对贵司挺有兴趣的,所以过来看看……”(未完) 这段话主要是陪HR互诈的同时,从公司兴趣,公司职员印象上,都给予对方正面的肯定,既能提升HR的好感度,又能让谈判气氛融洽,为后面的发挥留足空间。...

面试:第十六章:Java中级开发(16k)

HashMap底层实现原理,红黑树,B+树,B树的结构原理 Spring的AOP和IOC是什么?它们常见的使用场景有哪些?Spring事务,事务的属性,传播行为,数据库隔离级别 Spring和SpringMVC,MyBatis以及SpringBoot的注解分别有哪些?SpringMVC的工作原理,SpringBoot框架的优点,MyBatis框架的优点 SpringCould组件有哪些,他们...

面试阿里p7,被按在地上摩擦,鬼知道我经历了什么?

面试阿里p7被问到的问题(当时我只知道第一个):@Conditional是做什么的?@Conditional多个条件是什么逻辑关系?条件判断在什么时候执...

终于懂了TCP和UDP协议区别

终于懂了TCP和UDP协议区别

Python爬虫,高清美图我全都要(彼岸桌面壁纸)

爬取彼岸桌面网站较为简单,用到了requests、lxml、Beautiful Soup4

无代码时代来临,程序员如何保住饭碗?

编程语言层出不穷,从最初的机器语言到如今2500种以上的高级语言,程序员们大呼“学到头秃”。程序员一边面临编程语言不断推陈出新,一边面临由于许多代码已存在,程序员编写新应用程序时存在重复“搬砖”的现象。 无代码/低代码编程应运而生。无代码/低代码是一种创建应用的方法,它可以让开发者使用最少的编码知识来快速开发应用程序。开发者通过图形界面中,可视化建模来组装和配置应用程序。这样一来,开发者直...

面试了一个 31 岁程序员,让我有所触动,30岁以上的程序员该何去何从?

最近面试了一个31岁8年经验的程序猿,让我有点感慨,大龄程序猿该何去何从。

大三实习生,字节跳动面经分享,已拿Offer

说实话,自己的算法,我一个不会,太难了吧

程序员垃圾简历长什么样?

已经连续五年参加大厂校招、社招的技术面试工作,简历看的不下于万份 这篇文章会用实例告诉你,什么是差的程序员简历! 疫情快要结束了,各个公司也都开始春招了,作为即将红遍大江南北的新晋UP主,那当然要为小伙伴们做点事(手动狗头)。 就在公众号里公开征简历,义务帮大家看,并一一点评。《启舰:春招在即,义务帮大家看看简历吧》 一石激起千层浪,三天收到两百多封简历。 花光了两个星期的所有空闲时...

美团面试,问了ThreadLocal原理,这个回答让我通过了

他想都想不到,ThreadLocal我烂熟于心

大牛都会用的IDEA调试技巧!!!

导读 前天面试了一个985高校的实习生,问了他平时用什么开发工具,他想也没想的说IDEA,于是我抛砖引玉的问了一下IDEA的调试用过吧,你说说怎么设置断点...

面试官:你连SSO都不懂,就别来面试了

大厂竟然要考我SSO,卧槽。

立即提问
相关内容推荐