Netty 遇到的 refCnt: 0异常

代码运行到ByteBuf in = (ByteBuf) msg;
byte[] req = new byte[in.readableBytes()];
in.readBytes(req);
就会报错,哪位大神能解决?
public class ZhiNengKaiGuanServerHandler extends ChannelInboundHandlerAdapter {

@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
    // TODO Auto-generated method stub
    super.channelActive(ctx);
    //与服务端建立连接后
    System.out.println("链接成功");
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) 
        throws Exception {
    // TODO Auto-generated method stub
    super.channelRead(ctx, msg);
    System.out.println("server channelRead..");
    System.out.println("recive data:"+msg);
    ByteBuf in = (ByteBuf) msg;
    //in.retain();
    byte[] req = new byte[in.readableBytes()];
    in.readBytes(req);

    //String resultStr = new String(req);
    //System.out.println("Client said:" + resultStr);  
    // 释放资源,这行很关键  
    //in.release();  
    //String str=new String(req,"UTF-8");
    System.out.print("Receive data: { ");
    for (int i = 0; i <req.length ; i++) {
        String dhs = "00" + Integer.toHexString(req[i] & 0xFF);
        System.out.print(dhs.substring(dhs.length() - 2, dhs.length())
        .toUpperCase() + 
        " ");
    }
    System.out.println(" } ");
    String ser="hellow ,I'm Server";
    ctx.write(ser);


}

@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
    // TODO Auto-generated method stub
    super.channelReadComplete(ctx);
     System.out.println("server channelReadComplete..");
     ctx.flush();//刷新后才将数据发出到SocketChannel
}

@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
        throws Exception {
    // TODO Auto-generated method stub
    super.exceptionCaught(ctx, cause);
     System.out.println("server exceptionCaught..");
     cause.printStackTrace();
        ctx.close();

}


public class ZhiNengKaiGuanServer {

 private int port;

 public ZhiNengKaiGuanServer(int port) {
     this.port = port;
 }

 public void run() throws Exception {
        // 服务器线程组 用于网络事件的处理 一个用于服务器接收客户端的连接
        // 另一个线程组用于处理SocketChannel的网络读写
        EventLoopGroup bossGroup = new NioEventLoopGroup(); // (1)
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try {
            // NIO服务器端的辅助启动类 降低服务器开发难度
            ServerBootstrap b = new ServerBootstrap(); // (2)
            b.group(bossGroup, workerGroup)
             .channel(NioServerSocketChannel.class) // (3) 类似NIO中serverSocketChannel
             .childHandler(new ChannelInitializer<SocketChannel>() { // (4)
                 @Override
                 public void initChannel(SocketChannel ch) throws Exception {
                     ch.pipeline().addLast("encoder", new ObjectEncoder());  
                     //ch.pipeline().addLast("decoder", new ObjectDecoder(Integer.MAX_VALUE, ClassResolvers.cacheDisabled(null))); 
                     //ch.pipeline().addLast("decoder", new StringEncoder());
                     //ch.pipeline().addLast("frameDecoder", new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 4, 0, 4));  
                     //ch.pipeline().addLast("frameEncoder", new LengthFieldPrepender(4));  

                     // 自己的逻辑Handler
                     ch.pipeline().addLast(new ZhiNengKaiGuanServerHandler());
                 }
             })
             .option(ChannelOption.SO_BACKLOG, 1024)          // (5)
             .childOption(ChannelOption.SO_KEEPALIVE, true); // (6)// 最后绑定I/O事件的处理类

            // 服务器绑定端口监听
            ChannelFuture f = b.bind(port).sync(); // (7)
         // 监听服务器关闭监听
            f.channel().closeFuture().sync();
        } finally {
            // 优雅退出 释放线程池资源
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
            System.out.println("服务器优雅的释放了线程资源...");
        }
    }
 public static void main(String[] args) throws Exception {
        int port = 9000;
        if (args != null && args.length > 0) {
            try {
                port = Integer.valueOf(args[0]);
            } catch (NumberFormatException e) {

            }
        }

        new ZhiNengKaiGuanServer(9527).run();
    }

}

public class ZhiNengKaiGuanCommuccationConnector {

// 配置客户端NIO线程组
EventLoopGroup group = new NioEventLoopGroup();
private ZhiNengKaiGuan zhiNengKaiGuan;
public ZhiNengKaiGuanCommuccationConnector(ZhiNengKaiGuan zhiNengKaiGuan){
     this.zhiNengKaiGuan=zhiNengKaiGuan;
}
public ZhiNengKaiGuan getZhiNengKaiGuan() {
    return zhiNengKaiGuan;
}
public void setZhiNengKaiGuan(ZhiNengKaiGuan zhiNengKaiGuan) {
    this.zhiNengKaiGuan = zhiNengKaiGuan;
}

public void connect(final byte[] req) throws Exception {
    int port=zhiNengKaiGuan.getPort();
    String IP=zhiNengKaiGuan.getIp();
    System.out.println("this zhinengchazuo ip :" + this.zhiNengKaiGuan.getIp() + 
            " port:" + this.zhiNengKaiGuan.getPort());
          System.out.print("Send Command: { ");
          for (int i = 0; i < req.length; i++) {
            String dhs = "00" + 
              Integer.toHexString(req[i] & 0xFF);
            System.out.print(dhs.substring(dhs.length() - 2, 
              dhs.length()).toUpperCase() + 
              " ");
          }
          System.out.println(" } ");
    //配置客户端NIO线程组
    EventLoopGroup group = new NioEventLoopGroup();
    try {
        //客户端辅助启动类 对客户端配置
        Bootstrap b = new Bootstrap(); // (1)
        b.group(group); // (2)
        b.channel(NioSocketChannel.class); // (3)
        b.option(ChannelOption.SO_KEEPALIVE, true); // (4)
        b.handler(new ChannelInitializer<SocketChannel>() {
            @Override
            public void initChannel(SocketChannel ch) throws Exception {

                ch.pipeline().addLast("encoder", new ObjectEncoder());  
                ch.pipeline().addLast("decoder", new ObjectDecoder(Integer.MAX_VALUE, ClassResolvers.cacheDisabled(null)));
                ch.pipeline().addLast(new ZhiNengKaiGuanCommuncationHander(req));
            }
        });

        //异步链接服务器 同步等待链接成功
        ChannelFuture f = b.connect(IP, port).sync();
        //等待链接关闭
        f.channel().closeFuture().sync();

    } finally {
        group.shutdownGracefully();
        System.out.println("客户端优雅的释放了线程资源...");
    }

}

}

public class ZhiNengKaiGuanCommuncationHander extends ChannelInboundHandlerAdapter {
private static final Logger logger=Logger.getLogger(ZhiNengKaiGuanCommuncationHander.class.getName());
private ByteBuf sendMessage;

public ZhiNengKaiGuanCommuncationHander(byte[] reqs){

    sendMessage=Unpooled.buffer(reqs.length);
    sendMessage.writeBytes(reqs);

}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
    System.out.println(sendMessage);
    ctx.writeAndFlush(sendMessage);
    System.out.println("客户端active");
}

@Override
public void channelRead(ChannelHandlerContext ctx, Object msg)
        throws Exception {
    System.out.println("客户端收到服务器响应数据");
    /*ByteBuf m = (ByteBuf) msg; // (1)
    try {
        long currentTimeMillis = (m.readUnsignedInt() - 2208988800L) * 1000L;
        System.out.println(new Date(currentTimeMillis));
        byte[] req=new byte[m.readableBytes()];
        CommonUtil.remsg=req;
        m.readBytes(req);
        String body=new String(req,"UTF-8");
        System.out.println("Now is:"+body);

        ctx.close();
   } finally {
         m.release();
   }*/

}

@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
    ctx.flush();
    System.out.println("客户端收到服务器响应数据处理完成");
}

@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
        throws Exception {
    logger.warning("Unexpected exception from downstream:"+cause.getMessage());
    ctx.close();
    System.out.println("客户端异常退出");
}

}

public class Test2 {
public static void main(String[] args) {

    ZhiNengKaiGuan s=new ZhiNengKaiGuan("127.0.1.1", 9527);
    KaiGuanCmd j=new KaiGuanCmd(s);
    j.query();
}

}

5个回答

不要主动去release,不然异常

qq_26276097
qq_26276097 解决了,大佬果然神机妙算。
大约 2 年之前 回复

控制台报如下异常

链接成功
server channelRead..
recive data:SimpleLeakAwareByteBuf(UnpooledHeapByteBuf(freed))
2016-9-18 9:14:47 io.netty.channel.DefaultChannelPipeline onUnhandledInboundException
警告: An exceptionCaught() event was fired, and it reached at the tail of the pipeline. It usually means the last handler in the pipeline did not handle the exception.
io.netty.util.IllegalReferenceCountException: refCnt: 0
at io.netty.buffer.AbstractByteBuf.ensureAccessible(AbstractByteBuf.java:1407)
at io.netty.buffer.AbstractByteBuf.checkReadableBytes0(AbstractByteBuf.java:1393)
at io.netty.buffer.AbstractByteBuf.checkReadableBytes(AbstractByteBuf.java:1389)
at io.netty.buffer.AbstractByteBuf.readBytes(AbstractByteBuf.java:850)
at io.netty.buffer.AbstractByteBuf.readBytes(AbstractByteBuf.java:858)
at io.netty.buffer.WrappedByteBuf.readBytes(WrappedByteBuf.java:649)
at com.zhinengkaiguan.server.ZhiNengKaiGuanServerHandler.channelRead(ZhiNengKaiGuanServerHandler.java:31)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:372)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:358)
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:350)
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:372)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:358)
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926)
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:129)
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:610)
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:551)
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:465)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:437)
at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:873)
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
at java.lang.Thread.run(Thread.java:619)
server exceptionCaught..
io.netty.util.IllegalReferenceCountException: refCnt: 0
at io.netty.buffer.AbstractByteBuf.ensureAccessible(AbstractByteBuf.java:1407)
at io.netty.buffer.AbstractByteBuf.checkReadableBytes0(AbstractByteBuf.java:1393)
at io.netty.buffer.AbstractByteBuf.checkReadableBytes(AbstractByteBuf.java:1389)
at io.netty.buffer.AbstractByteBuf.readBytes(AbstractByteBuf.java:850)
at io.netty.buffer.AbstractByteBuf.readBytes(AbstractByteBuf.java:858)
at io.netty.buffer.WrappedByteBuf.readBytes(WrappedByteBuf.java:649)
at com.zhinengkaiguan.server.ZhiNengKaiGuanServerHandler.channelRead(ZhiNengKaiGuanServerHandler.java:31)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:372)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:358)
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:350)
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:372)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:358)
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926)
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:129)
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:610)
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:551)
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:465)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:437)
at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:873)
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
at java.lang.Thread.run(Thread.java:619)
server channelReadComplete..


同学你解决了吗?
我也遇到这个问题了。。。弄了半天没有解决

不主动release就行了

我遇到了,不知道为什么,在wirteAndFlush的时候回出错,在wiriteAndFlush之前retain就没问题了。

private static void sendHttpResponse(ChannelHandlerContext ctx,
                                         FullHttpRequest req, DefaultFullHttpResponse res) {
        // 返回应答给客户端
        if (res.status().code() != 200) {
            ByteBuf buf = Unpooled.copiedBuffer(res.status().toString(), CharsetUtil.UTF_8);
            res.content().writeBytes(buf);
            HttpUtil.setContentLength(res, res.content().readableBytes());
        }
        // 如果是非Keep-Alive,关闭连接
        ReferenceCountUtil.retain(req);
        ChannelFuture f = ctx.channel().writeAndFlush(res);
        if (!isKeepAlive(req) || res.status().code() != 200) {
            f.addListener(ChannelFutureListener.CLOSE);
        }
    }
Csdn user default icon
上传中...
上传图片
插入图片
抄袭、复制答案,以达到刷声望分或其他目的的行为,在CSDN问答是严格禁止的,一经发现立刻封号。是时候展现真正的技术了!
其他相关推荐
netty4.0 read0方法 wirteflush之后,就捕获异常了。

netty4 io.netty.util.IllegalReferenceCountException: refCnt: 0, decrement: 1 网上也看了,是relase了,造成实例被收回。这个要怎么处理?前提这是tcp长连接,怎么也会被收回???

Netty 远程断开后的异常问题

使用netty做长连接,server端每30秒进行一次心跳包发送,client端也设置了ReadTimeoutHandler,时间长度为40秒,在局域网开发环境下,如果server自动断开,不管是client.channel本身,还是ReadTimeoutHandler都有异常提出,但是在非局域网连接情况下,将client连接到到公网的server后,sever一旦断开,所有的client端都不会提示异常,而且连ReadTimeoutHandler也不会按指定时间执行,请问是什么原因,如何解决呢?请各位高手指点一下.

android中使用Netty当客户端时候出现以下错误连接不上

Caused by: io.netty.channel.ChannelException: Failed to open a socket. at io.netty.channel.socket.nio.NioSocketChannel.newSocket(NioSocketChannel.java:63) at io.netty.channel.socket.nio.NioSocketChannel.<init>(NioSocketChannel.java:80) at io.netty.channel.socket.nio.NioSocketChannel.<init>(NioSocketChannel.java:73) at java.lang.Class.newInstance(Native Method) at io.netty.channel.ReflectiveChannelFactory.newChannel(ReflectiveChannelFactory.java:38) at io.netty.bootstrap.AbstractBootstrap.initAndRegister(AbstractBootstrap.java:319)  at io.netty.bootstrap.Bootstrap.doResolveAndConnect(Bootstrap.java:163)  at io.netty.bootstrap.Bootstrap.connect(Bootstrap.java:119)  at com.client.Client.connect(Client.java:43)  at com.example.shx.serachnote.Main.onCreate(Main.java:43)  at android.app.Activity.performCreate(Activity.java:6664)  at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1118)  at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2599)  at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2707)  at android.app.ActivityThread.-wrap12(ActivityThread.java)  at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1460)  at android.os.Handler.dispatchMessage(Handler.java:102)  at android.os.Looper.loop(Looper.java:154)  at android.app.ActivityThread.main(ActivityThread.java:6077)  at java.lang.reflect.Method.invoke(Native Method)  at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:865)  at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:755)  Caused by: java.net.SocketException: Permission denied at sun.nio.ch.Net.socket0(Native Method) at sun.nio.ch.Net.socket(Net.java:433) at sun.nio.ch.Net.socket(Net.java:426) at sun.nio.ch.SocketChannelImpl.<init>(SocketChannelImpl.java:107) at sun.nio.ch.SelectorProviderImpl.openSocketChannel(SelectorProviderImpl.java:60) at io.netty.channel.socket.nio.NioSocketChannel.newSocket(NioSocketChannel.java:61) at io.netty.channel.socket.nio.NioSocketChannel.<init>(NioSocketChannel.java:80)  at io.netty.channel.socket.nio.NioSocketChannel.<init>(NioSocketChannel.java:73)  at java.lang.Class.newInstance(Native Method)  at io.netty.channel.ReflectiveChannelFactory.newChannel(ReflectiveChannelFactory.java:38)  at io.netty.bootstrap.AbstractBootstrap.initAndRegister(AbstractBootstrap.java:319)  at io.netty.bootstrap.Bootstrap.doResolveAndConnect(Bootstrap.java:163)  at io.netty.bootstrap.Bootstrap.connect(Bootstrap.java:119)  at com.client.Client.connect(Client.java:43)  at com.example.shx.serachnote.Main.onCreate(Main.java:43)  at android.app.Activity.performCreate(Activity.java:6664)  at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1118)  at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2599)  at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2707)  at android.app.ActivityThread.-wrap12(ActivityThread.java)  at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1460)  at android.os.Handler.dispatchMessage(Handler.java:102)  at android.os.Looper.loop(Looper.java:154)  at android.app.ActivityThread.main(ActivityThread.java:6077)  at java.lang.reflect.Method.invoke(Native Method)  at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:865)  at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:755) 

maven聚合的时候的时候报错

严重: The ServletContentInitializer [org.springframework.web.SpringServletContainerInitializer] could not be created java.lang.ClassNotFoundException: org.springframework.web.SpringServletContainerInitializer at org.apache.catalina.loader.WebappClassLoader.loadClass(WebappClassLoader.java:1702) at org.apache.catalina.loader.WebappClassLoader.loadClass(WebappClassLoader.java:1547) at java.lang.Class.forName0(Native Method) at java.lang.Class.forName(Class.java:348) at org.apache.catalina.startup.ContextConfig.getServletContainerInitializer(ContextConfig.java:1668) at org.apache.catalina.startup.ContextConfig.getServletContainerInitializers(ContextConfig.java:1652) at org.apache.catalina.startup.ContextConfig.processServletContainerInitializers(ContextConfig.java:1562) at org.apache.catalina.startup.ContextConfig.webConfig(ContextConfig.java:1270) at org.apache.catalina.startup.ContextConfig.configureStart(ContextConfig.java:878) at org.apache.catalina.startup.ContextConfig.lifecycleEvent(ContextConfig.java:376) at org.apache.catalina.util.LifecycleSupport.fireLifecycleEvent(LifecycleSupport.java:119) at org.apache.catalina.util.LifecycleBase.fireLifecycleEvent(LifecycleBase.java:90) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5322) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:150) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1559) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1549) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) 九月 05, 2016 3:40:24 下午 org.apache.catalina.startup.ContextConfig processServletContainerInitializers 严重: Failed to process JAR found at URL [jar:file:/G:/repo/org/springframework/spring-web/4.1.3.RELEASE/spring-web-4.1.3.RELEASE.jar!/] for ServletContainerInitializers for context with name [] 九月 05, 2016 3:40:24 下午 org.apache.catalina.startup.ContextConfig configureStart 严重: Marking this application unavailable due to previous error(s) 九月 05, 2016 3:40:24 下午 org.apache.catalina.core.StandardContext startInternal 严重: Error getConfigured 九月 05, 2016 3:40:24 下午 org.apache.catalina.core.StandardContext startInternal 严重: Context [] startup failed due to previous errors 九月 05, 2016 3:40:24 下午 org.apache.catalina.loader.WebappClassLoader clearReferencesJdbc 严重: The web application [] registered the JDBC driver [com.mysql.jdbc.Driver] but failed to unregister it when the web application was stopped. To prevent a memory leak, the JDBC Driver has been forcibly unregistered. 九月 05, 2016 3:40:24 下午 org.apache.catalina.loader.WebappClassLoader clearReferencesJdbc 严重: The web application [] registered the JDBC driver [com.mysql.fabric.jdbc.FabricMySQLDriver] but failed to unregister it when the web application was stopped. To prevent a memory leak, the JDBC Driver has been forcibly unregistered. 九月 05, 2016 3:40:24 下午 org.apache.catalina.loader.WebappClassLoader clearReferencesJdbc 严重: The web application [] registered the JDBC driver [com.alibaba.druid.proxy.DruidDriver] but failed to unregister it when the web application was stopped. To prevent a memory leak, the JDBC Driver has been forcibly unregistered. 九月 05, 2016 3:40:24 下午 org.apache.catalina.loader.WebappClassLoader clearReferencesJdbc 严重: The web application [] registered the JDBC driver [com.alibaba.druid.mock.MockDriver] but failed to unregister it when the web application was stopped. To prevent a memory leak, the JDBC Driver has been forcibly unregistered. 九月 05, 2016 3:40:24 下午 org.apache.catalina.loader.WebappClassLoader clearReferencesThreads 严重: The web application [] appears to have started a thread named [Abandoned connection cleanup thread] but has failed to stop it. This is very likely to create a memory leak. 九月 05, 2016 3:40:24 下午 org.apache.coyote.AbstractProtocol start 信息: Starting ProtocolHandler ["http-bio-8080"]

saprk集群执行任务时slaves显示拒绝连接,求解决!!!

搭建完spark HA 集群后执行任务显示master拒绝访问 集群的两台slaves都是这种情况,求解决 ![图片说明](https://img-ask.csdn.net/upload/202003/12/1583997250_677288.png) 下面为查看spark日志所显示的信息 ``` 2020-03-12 15:00:13 INFO ZooKeeper:438 - Initiating client connection, connectString=Hadoop01:2181,Hadoop02:2181,Hadoop03:2181 sessionTimeout=60000 watcher=org.apache.curator.ConnectionState@54ff5e34 2020-03-12 15:00:13 INFO ClientCnxn:975 - Opening socket connection to server Hadoop01/192.168.128.151:2181. Will not attempt to authenticate using SASL (unknown error) 2020-03-12 15:00:13 INFO ClientCnxn:852 - Socket connection established to Hadoop01/192.168.128.151:2181, initiating session 2020-03-12 15:00:14 INFO ClientCnxn:1235 - Session establishment complete on server Hadoop01/192.168.128.151:2181, sessionid = 0x170cd8aa81b0000, negotiated timeout = 40000 2020-03-12 15:00:14 INFO ConnectionStateManager:228 - State change: CONNECTED 2020-03-12 15:00:16 INFO ZooKeeperLeaderElectionAgent:54 - Starting ZooKeeper LeaderElection agent 2020-03-12 15:00:16 INFO CuratorFrameworkImpl:224 - Starting 2020-03-12 15:00:16 INFO ZooKeeper:438 - Initiating client connection, connectString=Hadoop01:2181,Hadoop02:2181,Hadoop03:2181 sessionTimeout=60000 watcher=org.apache.curator.ConnectionState@1a1c9fb4 2020-03-12 15:00:16 INFO ClientCnxn:975 - Opening socket connection to server Hadoop01/192.168.128.151:2181. Will not attempt to authenticate using SASL (unknown error) 2020-03-12 15:00:16 INFO ClientCnxn:852 - Socket connection established to Hadoop01/192.168.128.151:2181, initiating session 2020-03-12 15:00:16 INFO ClientCnxn:1235 - Session establishment complete on server Hadoop01/192.168.128.151:2181, sessionid = 0x170cd8aa81b0001, negotiated timeout = 40000 2020-03-12 15:00:16 INFO ConnectionStateManager:228 - State change: CONNECTED 2020-03-12 15:00:20 INFO ZooKeeperLeaderElectionAgent:54 - We have gained leadership 2020-03-12 15:00:20 INFO Master:54 - I have been elected leader! New state: RECOVERING 2020-03-12 15:00:20 INFO Master:54 - Trying to recover worker: worker-20200311210734-192.168.128.152-53095 2020-03-12 15:00:20 INFO Master:54 - Trying to recover worker: worker-20200311210734-192.168.128.153-51359 2020-03-12 15:00:21 WARN OneWayOutboxMessage:87 - Failed to send one-way RPC. java.io.IOException: Failed to connect to /192.168.128.152:53095 at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245) at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187) at org.apache.spark.rpc.netty.NettyRpcEnv.createClient(NettyRpcEnv.scala:198) at org.apache.spark.rpc.netty.Outbox$$anon$1.call(Outbox.scala:194) at org.apache.spark.rpc.netty.Outbox$$anon$1.call(Outbox.scala:190) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: io.netty.channel.AbstractChannel$AnnotatedConnectException: 拒绝连接: /192.168.128.152:53095 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717) at io.netty.channel.socket.nio.NioSocketChannel.doFinishConnect(NioSocketChannel.java:323) at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:340) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:633) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:580) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:497) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:459) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) ... 1 more Caused by: java.net.ConnectException: 拒绝连接 ... 11 more 2020-03-12 15:00:21 WARN OneWayOutboxMessage:87 - Failed to send one-way RPC. java.io.IOException: Failed to connect to /192.168.128.153:51359 at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245) at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187) at org.apache.spark.rpc.netty.NettyRpcEnv.createClient(NettyRpcEnv.scala:198) at org.apache.spark.rpc.netty.Outbox$$anon$1.call(Outbox.scala:194) at org.apache.spark.rpc.netty.Outbox$$anon$1.call(Outbox.scala:190) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: io.netty.channel.AbstractChannel$AnnotatedConnectException: 拒绝连接: /192.168.128.153:51359 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717) at io.netty.channel.socket.nio.NioSocketChannel.doFinishConnect(NioSocketChannel.java:323) at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:340) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:633) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:580) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:497) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:459) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) ... 1 more Caused by: java.net.ConnectException: 拒绝连接 ... 11 more 2020-03-12 15:00:24 INFO Master:54 - Registering worker 192.168.128.152:46027 with 2 cores, 1024.0 MB RAM 2020-03-12 15:00:26 INFO Master:54 - Registering worker 192.168.128.153:59036 with 2 cores, 1024.0 MB RAM 2020-03-12 15:01:21 INFO Master:54 - Removing worker worker-20200311210734-192.168.128.152-53095 on 192.168.128.152:53095 2020-03-12 15:01:21 INFO Master:54 - Telling app of lost worker: worker-20200311210734-192.168.128.152-53095 2020-03-12 15:01:21 INFO Master:54 - Removing worker worker-20200311210734-192.168.128.153-51359 on 192.168.128.153:51359 2020-03-12 15:01:21 INFO Master:54 - Telling app of lost worker: worker-20200311210734-192.168.128.153-51359 2020-03-12 15:01:21 INFO Master:54 - Recovery complete - resuming operations! 2020-03-12 15:05:05 INFO Master:54 - Registering app Spark Pi 2020-03-12 15:05:05 INFO Master:54 - Registered app Spark Pi with ID app-20200312150505-0000 2020-03-12 15:05:05 INFO Master:54 - Launching executor app-20200312150505-0000/0 on worker worker-20200312150020-192.168.128.153-59036 2020-03-12 15:05:16 INFO Master:54 - Received unregister request from application app-20200312150505-0000 2020-03-12 15:05:16 INFO Master:54 - Removing app app-20200312150505-0000 2020-03-12 15:05:16 WARN OneWayOutboxMessage:87 - Failed to send one-way RPC. java.io.IOException: Failed to connect to /192.168.128.153:51359 at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245) at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187) at org.apache.spark.rpc.netty.NettyRpcEnv.createClient(NettyRpcEnv.scala:198) at org.apache.spark.rpc.netty.Outbox$$anon$1.call(Outbox.scala:194) at org.apache.spark.rpc.netty.Outbox$$anon$1.call(Outbox.scala:190) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: io.netty.channel.AbstractChannel$AnnotatedConnectException: 拒绝连接: /192.168.128.153:51359 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717) at io.netty.channel.socket.nio.NioSocketChannel.doFinishConnect(NioSocketChannel.java:323) at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:340) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:633) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:580) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:497) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:459) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) ... 1 more Caused by: java.net.ConnectException: 拒绝连接 ... 11 more 2020-03-12 15:05:16 WARN OneWayOutboxMessage:87 - Failed to send one-way RPC. java.io.IOException: Failed to connect to /192.168.128.152:53095 at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245) at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187) at org.apache.spark.rpc.netty.NettyRpcEnv.createClient(NettyRpcEnv.scala:198) at org.apache.spark.rpc.netty.Outbox$$anon$1.call(Outbox.scala:194) at org.apache.spark.rpc.netty.Outbox$$anon$1.call(Outbox.scala:190) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: io.netty.channel.AbstractChannel$AnnotatedConnectException: 拒绝连接: /192.168.128.152:53095 at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717) at io.netty.channel.socket.nio.NioSocketChannel.doFinishConnect(NioSocketChannel.java:323) at io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:340) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:633) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:580) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:497) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:459) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) ... 1 more Caused by: java.net.ConnectException: 拒绝连接 ... 11 more 2020-03-12 15:05:16 INFO Master:54 - 192.168.128.151:55888 got disassociated, removing it. 2020-03-12 15:05:16 INFO Master:54 - Hadoop01:39679 got disassociated, removing it. 2020-03-12 15:05:16 WARN Master:66 - Got status update for unknown executor app-20200312150505-0000/0 ```

报错,求大神指点,发布项目的时候

``` 2016-05-18 18:05:56.711:INFO:/:Initializing log4j system 2016-05-18 18:05:56.714:INFO:/:Could not find log4j configuration file "/WEB-INF/log4j.xml" in webapp context. Using default configurations. INFO: configuring "log4j" using jar:file:/D:/maven/mvnRespo/com/alibaba/citrus/citrus-webx-all/3.0.9/citrus-webx-all-3.0.9.jar!/com/alibaba/citrus/logconfig/log4j/log4j-default.xml - with property localAddress = 100.81.168.223 - with property localHost = CP-WB179986-01 - with property log.level = INFO - with property log4j.defaultInitOverride = true - with property log_print_to_console = log_print_to_console - with property loggingCharset = UTF-8 - with property loggingLevel = INFO - with property loggingRoot = D:\Users\wb-hqm179986\logs 2016-05-18 18:05:56.898:INFO:/:Initializing Spring root WebApplicationContext Web Context替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\webx.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\webx.xml】 HSFJettyWebAppContext replace servlet context get file /C:/Users/WB-HQM~1/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/webx.xml Web Context替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\common\webx-component-and-root.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\common\webx-component-and-root.xml】 HSFJettyWebAppContext replace servlet context get file /C:/Users/WB-HQM~1/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/common/webx-component-and-root.xml 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\common\webx-component-and-root.xml】 HSFJettyWebAppContext replace servlet context get file /C:/Users/WB-HQM~1/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/common/webx-component-and-root.xml Web Context替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\common\uris.xml】 HSFJettyWebAppContext replace servlet context get file /C:/Users/wb-hqm179986/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/common/uris.xml HSFJettyWebAppContext replace servlet context get file /C:/Users/wb-hqm179986/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/common/uris.xml HSFJettyWebAppContext replace servlet context get file /C:/Users/wb-hqm179986/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/ HSFJettyWebAppContext replace servlet context get file /C:/Users/wb-hqm179986/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/ Web Context替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\webx-bizconsole.xml】 2016-05-18 18:06:10.255:INFO:/:Initializing Spring sub WebApplicationContext: bizconsole 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\webx-bizconsole.xml】 HSFJettyWebAppContext replace servlet context get file /C:/Users/WB-HQM~1/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/webx-bizconsole.xml 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\common\webx-component-and-root.xml】 HSFJettyWebAppContext replace servlet context get file /C:/Users/WB-HQM~1/AppData/Local/Temp/hsf_jetty_placeholder/WEB-INF/common/webx-component-and-root.xml Web Context替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\WEB-INF\common\webx-component.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\buc-client-hsf.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\buc-client-hsf.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\buc-client-hsf.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\biz-hsf-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\biz-hsf-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\biz-hsf-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\forest-hsf-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\forest-hsf-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\config\hsf\forest-hsf-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\spring\common\sls-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\spring\common\sls-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\spring\common\sls-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\spring\common\keycenter-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\spring\common\keycenter-client.xml】 替换文件到【C:\Users\WB-HQM~1\AppData\Local\Temp\hsf_jetty_placeholder\spring\common\keycenter-client.xml】 JM.Log:INFO Init JM logger with Slf4jLoggerFactory JM.Log:INFO Log root path: D:\Users\wb-hqm179986\logs\ JM.Log:INFO Set diamond-client log path: D:\Users\wb-hqm179986\logs\diamond-client JM.Log:INFO Init JM logger with Slf4jLoggerFactory JM.Log:INFO Log root path: D:\Users\wb-hqm179986\logs\ JM.Log:INFO Set hsf log path: D:\Users\wb-hqm179986\logs\hsf 18:06:25.305 [main] DEBUG i.n.u.i.l.InternalLoggerFactory - Using SLF4J as the default logging framework 18:06:25.308 [main] DEBUG i.n.c.MultithreadEventLoopGroup - -Dio.netty.eventLoopThreads: 8 18:06:25.333 [main] DEBUG i.n.util.internal.PlatformDependent0 - java.nio.Buffer.address: available 18:06:25.334 [main] DEBUG i.n.util.internal.PlatformDependent0 - sun.misc.Unsafe.theUnsafe: available 18:06:25.334 [main] DEBUG i.n.util.internal.PlatformDependent0 - sun.misc.Unsafe.copyMemory: available 18:06:25.334 [main] DEBUG i.n.util.internal.PlatformDependent0 - java.nio.Bits.unaligned: true 18:06:25.334 [main] DEBUG i.n.util.internal.PlatformDependent - Platform: Windows 18:06:25.334 [main] DEBUG i.n.util.internal.PlatformDependent - Java version: 7 18:06:25.335 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.noUnsafe: false 18:06:25.335 [main] DEBUG i.n.util.internal.PlatformDependent - sun.misc.Unsafe: available 18:06:25.335 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.noJavassist: false 18:06:25.664 [main] DEBUG i.n.util.internal.PlatformDependent - Javassist: available 18:06:25.664 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.tmpdir: C:\Users\WB-HQM~1\AppData\Local\Temp (java.io.tmpdir) 18:06:25.664 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.bitMode: 64 (sun.arch.data.model) 18:06:25.664 [main] DEBUG i.n.util.internal.PlatformDependent - -Dio.netty.noPreferDirect: false 18:06:25.758 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.noKeySetOptimization: false 18:06:25.759 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.selectorAutoRebuildThreshold: 512 18:06:25.802 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetectionLevel: simple 18:06:25.967 [main] DEBUG i.n.u.i.JavassistTypeParameterMatcherGenerator - Generated: io.netty.util.internal.__matchers__.com.taobao.hsf.remoting.BaseRequestMatcher 18:06:25.994 [main] DEBUG i.n.u.i.JavassistTypeParameterMatcherGenerator - Generated: io.netty.util.internal.__matchers__.com.taobao.hsf.remoting.netty.server.http.domain.NettyHttpRpcRequestMatcher JM.Log:INFO Log root path: D:\Users\wb-hqm179986\logs\ JM.Log:INFO Init JM logger with Slf4jLoggerFactory JM.Log:INFO Set configclient log path: D:\Users\wb-hqm179986\logs\configclient Exception in thread "HSF-Remoting-Timer-6-thread-1" Exception in thread "BufferedStatLogWriter-Flush-Timer" 2016-05-18 18:07:48.650:WARN::failed runjettyrun.HSFJettyWebAppContext@6aaaa67b{/,src/main/webapp}: java.lang.OutOfMemoryError: PermGen space 2016-05-18 18:07:48.651:WARN::Error starting handlers java.lang.OutOfMemoryError: PermGen space 2016-05-18 18:07:49.189:WARN::failed org.mortbay.jetty.nio.SelectChannelConnector$1@360987dc: java.lang.OutOfMemoryError: PermGen space 2016-05-18 18:07:49.190:WARN::failed SelectChannelConnector@0.0.0.0:8081: java.lang.OutOfMemoryError: PermGen space Exception in thread "-thread-2" 2016-05-18 18:07:52.172:WARN::failed Ajp13SocketConnector@0.0.0.0:8009: java.lang.OutOfMemoryError: PermGen space 2016-05-18 18:07:54.105:WARN::failed Server@1b897a77: org.mortbay.util.MultiException[java.lang.OutOfMemoryError: PermGen space, java.lang.OutOfMemoryError: PermGen space] Exception in thread "main" ```

netty4,异常断线的问题.

我写了个消息转发程序,也加了心跳处理,1分钟没读写操作的用户自动被踢下线. 可是现在遇到一个问题,如A用户要发送消息给B用户,通过服务器中转, 服务器在接收到A的消息时,B用户实际已经断网了(我把B用户的网线拔掉了), 这时服务器既然是不知道的. [code="java"] ChannelFuture writeFuture = channel.write(msg); final Channel sendChannel = ctx.channel(); writeFuture.addListener(new ChannelFutureListener() { @Override public void operationComplete(ChannelFuture future) throws Exception { if (future.isSuccess()){ sendChannel.write(JSONResult.getSuccess()); System.err.println("future.isSuccess()"); } if (future.isDone()){ sendChannel.write(JSONResult.getSuccess()); System.err.println("future.isDone()"); } if (future.isCancelled()){ sendChannel.write(JSONResult.getSuccess()); System.err.println("future.isCancelled()"); } } }); [/code] operationComplete都会返回成功..只有到了1分钟的时候,服务器才回把B用户踢下线.我要怎么才能在服务端判断B用户实际已经掉线了??

Logstash与tcp,错误:java.io.IOException:连接被对等方重置

<div class="post-text" itemprop="text"> <p>我想在我的GolangApp中使用logstash。</p> <p>/etc/logstash/conf.d/first-pipeline.conf </p> <pre><code>input { tcp { port =&gt; 5959 codec =&gt; json } } #filter {} output { elasticsearch { hosts =&gt; [ "localhost:9200" ] } } </code></pre> <p>和用于运行logstash的命令:</p> <p>/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/first-pipeline.conf --path.settings=/etc/logstash</p> <pre><code> Sending Logstash logs to /var/log/logstash which is now configured via log4j2.properties [2018-12-09T09:11:14,984][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified [2018-12-09T09:11:14,995][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=&gt;"6.5.1"} [2018-12-09T09:11:16,968][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=&gt;"main", "pipeline.workers"=&gt;4, "pipeline.batch.size"=&gt;125, "pipeline.batch.delay"=&gt;50} [2018-12-09T09:11:17,347][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=&gt;{:removed=&gt;[], :added=&gt;[http://localhost:9200/]}} [2018-12-09T09:11:17,356][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=&gt;http://localhost:9200/, :path=&gt;"/"} [2018-12-09T09:11:17,589][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=&gt;"http://localhost:9200/"} [2018-12-09T09:11:17,655][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=&gt;6} [2018-12-09T09:11:17,660][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=&gt;6} [2018-12-09T09:11:17,692][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=&gt;"LogStash::Outputs::ElasticSearch", :hosts=&gt;["//localhost:9200"]} [2018-12-09T09:11:17,730][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=&gt;nil} [2018-12-09T09:11:17,786][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=&gt;{"template"=&gt;"logstash-*", "version"=&gt;60001, "settings"=&gt;{"index.refresh_interval"=&gt;"5s"}, "mappings"=&gt;{"_default_"=&gt;{"dynamic_templates"=&gt;[{"message_field"=&gt;{"path_match"=&gt;"message", "match_mapping_type"=&gt;"string", "mapping"=&gt;{"type"=&gt;"text", "norms"=&gt;false}}}, {"string_fields"=&gt;{"match"=&gt;"*", "match_mapping_type"=&gt;"string", "mapping"=&gt;{"type"=&gt;"text", "norms"=&gt;false, "fields"=&gt;{"keyword"=&gt;{"type"=&gt;"keyword", "ignore_above"=&gt;256}}}}}], "properties"=&gt;{"@timestamp"=&gt;{"type"=&gt;"date"}, "@version"=&gt;{"type"=&gt;"keyword"}, "geoip"=&gt;{"dynamic"=&gt;true, "properties"=&gt;{"ip"=&gt;{"type"=&gt;"ip"}, "location"=&gt;{"type"=&gt;"geo_point"}, "latitude"=&gt;{"type"=&gt;"half_float"}, "longitude"=&gt;{"type"=&gt;"half_float"}}}}}}}} [2018-12-09T09:11:17,809][INFO ][logstash.inputs.tcp ] Automatically switching from json to json_lines codec {:plugin=&gt;"tcp"} [2018-12-09T09:11:17,862][INFO ][logstash.inputs.tcp ] Starting tcp input listener {:address=&gt;"0.0.0.0:5959", :ssl_enable=&gt;"false"} [2018-12-09T09:11:18,097][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=&gt;"main", :thread=&gt;"#&lt;Thread:0x42d68f8e run&gt;"} [2018-12-09T09:11:18,157][INFO ][logstash.agent ] Pipelines running {:count=&gt;1, :running_pipelines=&gt;[:main], :non_running_pipelines=&gt;[]} [2018-12-09T09:11:18,329][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=&gt;9600} </code></pre> <p>Code in golang:</p> <pre><code>import ( "encoding/json" "fmt" "github.com/heatxsink/go-logstash" "time" ) func main() { l := logstash.New("0.0.0.0", 5959, 5) _, err := l.Connect() if err != nil { fmt.Println(err) } dataMap := map[string]int{"apple": 5, "lettuce": 7} jsonMap, _ := json.Marshal(dataMap) err = l.Writeln(string(jsonMap)) if err != nil { fmt.Println(err) } } </code></pre> <p>当我试图请求登录应用程序时,终端会显示这个错误:</p> <blockquote> <p>[2018-12-09T09:12:41,954][ERROR][logstash.inputs.tcp ] Error in Netty pipeline: java.io.IOException: Connection reset by peer</p> </blockquote> <p>所有的东西都在我的本地系统中。请帮帮我!</p> </div>

java netty NoClassDefFoundError 找不到eventloop class

在eclipse上用用netty框架写了个demo,直接在eclipse上能编译运行成功,导出了个jar包, 用java -jar 运行后,报出了如下错误: Error: A JNI error has occurred, please check your installation and try again Exception in thread "main" java.lang.NoClassDefFoundError: io/netty/channel/Even tLoopGroup at java.lang.Class.getDeclaredMethods0(Native Method) at java.lang.Class.privateGetDeclaredMethods(Unknown Source) at java.lang.Class.privateGetMethodRecursive(Unknown Source) at java.lang.Class.getMethod0(Unknown Source) at java.lang.Class.getMethod(Unknown Source) at sun.launcher.LauncherHelper.validateMainClass(Unknown Source) at sun.launcher.LauncherHelper.checkAndLoadMain(Unknown Source) Caused by: java.lang.ClassNotFoundException: io.netty.channel.EventLoopGroup at java.net.URLClassLoader.findClass(Unknown Source) at java.lang.ClassLoader.loadClass(Unknown Source) at sun.misc.Launcher$AppClassLoader.loadClass(Unknown Source) at java.lang.ClassLoader.loadClass(Unknown Source) ... 7 more 最开始以为是包没打好, 又重新在manifest里面定义了一下classpath,完事一样还是找不到eventloopgroup class,我工程里面就引用了一个netty-5.0.0.Alpha2,我看了一下这个包,class文件好好的静静的就躺在那里........为什么它找不到,求大神们给个解决方案。。。。

Netty5客户端第二次连接服务端异常

用Netty5开发通信框架时出现客户端强制关闭程序,服务端捕获到客户端断开连接并关闭channel;客户端再次启动程序连接服务端会连接不上 服务端代码: ![启动服务](https://img-ask.csdn.net/upload/201607/09/1468037617_277162.png) 服务端handler: ![服务端handler:](https://img-ask.csdn.net/upload/201607/09/1468038034_658063.png) 客户端代码: ![启动客户端](https://img-ask.csdn.net/upload/201607/09/1468038203_298005.png) 客户端handler: ![客户端handler](https://img-ask.csdn.net/upload/201607/09/1468038310_93974.png) 先启动服务端,等待客户端连接 第一次启动客户端建立连接,连接建立时客户端正常调用channelActive方法发送数据,服务端正常接口数据并返回响应,客户端正常接收响应。当客户端程序执行完成,关闭程序,服务端可以捕获到客户端断开的信息: ![服务端成功捕获客户端断开](https://img-ask.csdn.net/upload/201607/09/1468038938_389467.png) 服务端捕获客户端断开后作出处理,清理channel并关闭ChannelHandlerContext并抛出异常: ![客户端断开连接后服务端抛出的异常](https://img-ask.csdn.net/upload/201607/09/1468039139_985584.png) 再次启动客户端程序连接服务端,程序依然可以进到channelActive发送消息,(channelActive触发是否可以确定已经建立了TCP连接?) ![第二次连接依然可以触发channelActive](https://img-ask.csdn.net/upload/201607/09/1468039445_679816.png) 然而虽然已经向服务端writeAndFlush,但是服务端并没有收到任何消息,客户端会抛出异常: ![客户端第二次启动抛出的异常](https://img-ask.csdn.net/upload/201607/09/1468040066_459885.png) 跟踪异常,是客户端在向chennal写数据时抛出错误! 为什么我服务端已经清理了客户端信息并关闭了通道,客户端程序第二次连接发送数据会出现异常呢? 求大神解答!

netty:一个包的数据,channelRead回调的bytebuf数据不完整?

![图片说明](https://img-ask.csdn.net/upload/201904/17/1555466944_332865.png) ``` <netty.version>4.1.32.Final</netty.version> ``` 第一次channelRead读取的数据: ``` FEFCBFFFFFFFFEFFFEFFDCFFF5FFC8FFFDFFFAFFFCFBFFF8DDFFFFFFFFFFFFFFEFFFE0FFE0FFFFFFF4FFF4FFEFFFF0FFFFFEA1FFFF8AEBF246061C44B2E8493F ``` 第二次channelRead读取的数据: ``` A5540814A1 ``` 两次读取才是一个包的完整数据 一个包的数据,被分为两次读取,这个结论是通过抓包得到的 虽然问题得到解决,但是不知道为何会发生这种情况 请群里大神解答一下 因使用的是最简单的tcp文本协议, 没有包头和包长度 采用的方案是判断接收一个包的字符串长度 [使用netty4.x客户端接收较大数据量报文时发生的读取不完整bug修复记录](https://www.cnblogs.com/GoQC/p/6137160.html ) 我是通过这个文章的思路解决的问题

netty在运行一段时候以后就会卡死,请大佬帮忙看看是怎么回事

服务端在运行若干小时以后就会卡死,不管客户端再怎么请求也收不到任何返回。主要是出现在业务逻辑线程池里: 我先放出线程堆栈: Full thread dump Java HotSpot(TM) 64-Bit Server VM (25.121-b13 mixed mode): "Attach Listener" #82 daemon prio=9 os_prio=0 tid=0x00007fe4e4001000 nid=0x5f1 waiting on condition [0x0000000000000000] java.lang.Thread.State: RUNNABLE "nioEventLoopGroup-4-32" #74 prio=10 os_prio=0 tid=0x00007fe46403c800 nid=0x6e28 runnable [0x00007fe45814d000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x0000000080b08d08> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x0000000080b09dc0> (a java.util.Collections$UnmodifiableSet) - locked <0x0000000080b08c10> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-31" #72 prio=10 os_prio=0 tid=0x00007fe46403a800 nid=0x6dc2 waiting on condition [0x00007fe45818d000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-30" #71 prio=10 os_prio=0 tid=0x00007fe464038800 nid=0x6cd0 waiting on condition [0x00007fe4581ce000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-29" #70 prio=10 os_prio=0 tid=0x00007fe464037000 nid=0x6cae waiting on condition [0x00007fe45820f000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-28" #69 prio=10 os_prio=0 tid=0x00007fe464036000 nid=0x6c70 waiting on condition [0x00007fe458291000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-27" #68 prio=10 os_prio=0 tid=0x00007fe464034000 nid=0x6c1d waiting on condition [0x00007fe458250000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-26" #66 prio=10 os_prio=0 tid=0x00007fe464032800 nid=0x6bf4 waiting on condition [0x00007fe4582d2000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-25" #65 prio=10 os_prio=0 tid=0x00007fe464030800 nid=0x6be5 waiting on condition [0x00007fe458313000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getKeysList(RedisManager.java:324) at com.game.handler.DrawTaskRewardHandler.hander(DrawTaskRewardHandler.java:51) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-24" #64 prio=10 os_prio=0 tid=0x00007fe46402f000 nid=0x6bd9 runnable [0x00007fe458355000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x0000000080c7c3b0> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x0000000080c7c3a0> (a java.util.Collections$UnmodifiableSet) - locked <0x0000000080c7c3c8> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-23" #63 prio=10 os_prio=0 tid=0x00007fe46402d000 nid=0x6b77 waiting on condition [0x00007fe458395000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-22" #62 prio=10 os_prio=0 tid=0x00007fe46402b000 nid=0x6b25 runnable [0x00007fe4583d7000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x0000000080c19bb8> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x0000000080c19ba8> (a java.util.Collections$UnmodifiableSet) - locked <0x0000000080c19bd0> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-21" #61 prio=10 os_prio=0 tid=0x00007fe46402a000 nid=0x6ae7 waiting on condition [0x00007fe458417000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-20" #59 prio=10 os_prio=0 tid=0x00007fe464028800 nid=0x6ab6 waiting on condition [0x00007fe458ab5000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-19" #58 prio=10 os_prio=0 tid=0x00007fe464026800 nid=0x6aad waiting on condition [0x00007fe458af6000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-18" #57 prio=10 os_prio=0 tid=0x00007fe464025000 nid=0x6aac runnable [0x00007fe458b38000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x000000008086c6f0> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x000000008086c6e0> (a java.util.Collections$UnmodifiableSet) - locked <0x000000008086c708> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-17" #56 prio=10 os_prio=0 tid=0x00007fe464023000 nid=0x6a9a waiting on condition [0x00007fe458b78000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:297) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:413) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:265) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-16" #55 prio=10 os_prio=0 tid=0x00007fe464021800 nid=0x6a87 runnable [0x00007fe458bba000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x00000000807dcf40> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x00000000807dcf30> (a java.util.Collections$UnmodifiableSet) - locked <0x00000000807dcf58> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-15" #54 prio=10 os_prio=0 tid=0x00007fe46401f800 nid=0x6a02 runnable [0x00007fe458bfb000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x00000000808b60d8> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x00000000808ae818> (a java.util.Collections$UnmodifiableSet) - locked <0x00000000808b6040> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-14" #53 prio=10 os_prio=0 tid=0x00007fe46401d800 nid=0x69ee runnable [0x00007fe4e806a000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x0000000080866e78> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x0000000080866e68> (a java.util.Collections$UnmodifiableSet) - locked <0x0000000080866e90> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-13" #52 prio=10 os_prio=0 tid=0x00007fe46401c000 nid=0x69cb waiting on condition [0x00007fe4e80aa000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-12" #51 prio=10 os_prio=0 tid=0x00007fe46401a800 nid=0x6928 waiting on condition [0x00007fe4e80eb000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-11" #50 prio=10 os_prio=0 tid=0x00007fe464018800 nid=0x68fe runnable [0x00007fe4e812d000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x0000000080036480> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x0000000080036470> (a java.util.Collections$UnmodifiableSet) - locked <0x0000000080036428> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-10" #49 prio=10 os_prio=0 tid=0x00007fe464017000 nid=0x68dc waiting on condition [0x00007fe4e816d000] java.lang.Thread.State: WAITING (parking) at sun.misc.Unsafe.park(Native Method) - parking to wait for <0x000000008012d6d8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject) at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175) at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039) at org.apache.commons.pool2.impl.LinkedBlockingDeque.takeFirst(LinkedBlockingDeque.java:587) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:440) at org.apache.commons.pool2.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:361) at redis.clients.jedis.util.Pool.getResource(Pool.java:50) at redis.clients.jedis.JedisPool.getResource(JedisPool.java:234) at com.game.redis.RedisManager.getString(RedisManager.java:219) at com.game.handler.CapitalHandler.hander(CapitalHandler.java:36) at com.game.netty.TcpServerHanler.channelRead(TcpServerHanler.java:107) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:134) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) "nioEventLoopGroup-4-9" #48 prio=10 os_prio=0 tid=0x00007fe464015000 nid=0x68c7 runnable [0x00007fe4e81af000] java.lang.Thread.State: RUNNABLE at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method) at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269) at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:93) at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86) - locked <0x0000000080036788> (a io.netty.channel.nio.SelectedSelectionKeySet) - locked <0x0000000080036778> (a java.util.Collections$UnmodifiableSet) - locked <0x0000000080036730> (a sun.nio.ch.EPollSelectorImpl) at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97) at io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:62) at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:752) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:408) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858) at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138) at java.lang.Thread.run(Thread.java:745) 线程堆栈有些没看明白,排除法先是看看线程池、数据库线程池还没想好什么想法去看。

channelFuture.awaitUninterruptibly

程序在调用下面一段代码时发生死锁,可能有哪些原因导致死锁 执行代码: channelFuture = cb.connect(new InetSocketAddress(this.host, this.port)); channelFuture.awaitUninterruptibly(connTimeout, TimeUnit.MILLISECONDS); 异常内容: 2018-03-08 14:37:19[ERROR][New I/O worker #28][com.cmbchina.ocean.transport.netty.TcpLongConnectionServerHandler:124] 异常:await*() in I/O thread causes a dead lock or sudden performance drop. Use addListener() instead or call await*() from a different thread. java.lang.IllegalStateException: await*() in I/O thread causes a dead lock or sudden performance drop. Use addListener() instead or call await*() from a different thread. at org.jboss.netty.channel.DefaultChannelFuture.checkDeadLock(DefaultChannelFuture.java:342) at org.jboss.netty.channel.DefaultChannelFuture.await0(DefaultChannelFuture.java:306) at org.jboss.netty.channel.DefaultChannelFuture.awaitUninterruptibly(DefaultChannelFuture.java:277) at com.cmbchina.ocean.transport.tcp.TCPClient.connect(TCPClient.java:351)

入坑netty但是发现坑好多啊

我这个加了心跳机制,发现这么一个问题,客户端发送给服务器端的心跳信息如果不加入\n,发现无法触发 服务器端的read0那个方法,但是一旦加上,就会报错 DefaultFullHttpRequest(decodeResult: failure(java.lang.IllegalArgumentException: invalid version format: BODY=NULL]), version: HTTP/1.0, content: UnpooledByteBufAllocator$InstrumentedUnpooledUnsafeHeapByteBuf(ridx: 0, widx: 0, cap: 0)) GET /bad-request HTTP/1.0 贴上代码 ``` protected void channelRead0(ChannelHandlerContext ctx, Object o) throws Exception { System.out.println(o); if(o instanceof FullHttpRequest){ //握手阶段 传统http协议 dealHandshake(ctx, (FullHttpRequest) o); }else if(o instanceof TextWebSocketFrame){ //文本消息处理 dealTextWebSocketFrame(ctx, (TextWebSocketFrame) o); }else if(o instanceof PingWebSocketFrame){ System.out.println("jin-------------"); //心跳消息处理 }else if(o instanceof CloseWebSocketFrame){ //断开处理 try { dealCloseWebSocketFrame(ctx); }catch (Exception e){ e.printStackTrace(); } }` ``` 这个是服务器端的代码,上面那个报错信息就是o打印出来的 ``` protected void sendPingMsg(ChannelHandlerContext ctx){ Model model = new Model(); model.setType(TypeData.PING); ctx.channel().writeAndFlush(model+"\n"); heartbeatCount++; System.out.println(name + " send ping msg to " + ctx.channel().remoteAddress() + "count :" + heartbeatCount); } ``` 这个是发送的心跳信息,加上了\n 因为网上说必须加这个,没有结束符不然无法识别

需求: MQ+ netty 做 集群推送服务

通过MQ 广播 消息 ,多个netty 消费mq消息,然后通过 channal 推动, 想问一下,这种情况下 客户端 怎么分配netty对应的ip:端口 ? 或者有没有更好的方案推荐下,谢谢。

springboot、netty、redis

springboot 整合netty搭建socket,使用redis缓存,可以正常启动,在不适用缓存时可以交互,使用缓存出异常 ``` java.lang.NoSuchMethodError: io.netty.bootstrap.Bootstrap.channel(Ljava/lang/Class;)Lio/netty/bootstrap/AbstractBootstrap; at io.lettuce.core.AbstractRedisClient.channelType(AbstractRedisClient.java:179) ~[lettuce-core-5.1.4.RELEASE.jar:?] at io.lettuce.core.RedisClient.connectStatefulAsync(RedisClient.java:304) ~[lettuce-core-5.1.4.RELEASE.jar:?] at io.lettuce.core.RedisClient.connectStandaloneAsync(RedisClient.java:271) ~[lettuce-core-5.1.4.RELEASE.jar:?] at io.lettuce.core.RedisClient.connect(RedisClient.java:204) ~[lettuce-core-5.1.4.RELEASE.jar:?] at org.springframework.data.redis.connection.lettuce.StandaloneConnectionProvider.lambda$getConnection$1(StandaloneConnectionProvider.java:113) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at java.util.Optional.orElseGet(Optional.java:267) ~[?:1.8.0_201] at org.springframework.data.redis.connection.lettuce.StandaloneConnectionProvider.getConnection(StandaloneConnectionProvider.java:113) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory$SharedConnection.getNativeConnection(LettuceConnectionFactory.java:1085) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory$SharedConnection.getConnection(LettuceConnectionFactory.java:1065) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory.getSharedConnection(LettuceConnectionFactory.java:865) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory.getConnection(LettuceConnectionFactory.java:340) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.core.RedisConnectionUtils.doGetConnection(RedisConnectionUtils.java:132) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.core.RedisConnectionUtils.getConnection(RedisConnectionUtils.java:95) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.core.RedisConnectionUtils.getConnection(RedisConnectionUtils.java:82) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.core.RedisTemplate.execute(RedisTemplate.java:211) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.core.RedisTemplate.execute(RedisTemplate.java:184) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.core.AbstractOperations.execute(AbstractOperations.java:95) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at org.springframework.data.redis.core.DefaultValueOperations.get(DefaultValueOperations.java:53) ~[spring-data-redis-2.1.5.RELEASE.jar:2.1.5.RELEASE] at com.viewhigh.hiot.elec.service.serviceimp.RedisServiceImpl.get(RedisServiceImpl.java:76) ~[classes/:?] at com.viewhigh.hiot.elec.protocol.ProtocolElecService.isCheck(ProtocolElecService.java:177) ~[classes/:?] at com.viewhigh.hiot.elec.protocol.ProtocolElecService.checkMsg(ProtocolElecService.java:161) ~[classes/:?] at com.viewhigh.hiot.elec.server.SocketServerHandler.channelRead(SocketServerHandler.java:89) ~[classes/:?] at io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelReadNow(ChannelHandlerInvokerUtil.java:74) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.DefaultChannelHandlerInvoker.invokeChannelRead(DefaultChannelHandlerInvoker.java:138) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.DefaultChannelHandlerContext.fireChannelRead(DefaultChannelHandlerContext.java:320) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:154) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelReadNow(ChannelHandlerInvokerUtil.java:74) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.DefaultChannelHandlerInvoker.invokeChannelRead(DefaultChannelHandlerInvoker.java:138) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.DefaultChannelHandlerContext.fireChannelRead(DefaultChannelHandlerContext.java:320) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:253) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelReadNow(ChannelHandlerInvokerUtil.java:74) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.DefaultChannelHandlerInvoker.invokeChannelRead(DefaultChannelHandlerInvoker.java:138) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.DefaultChannelHandlerContext.fireChannelRead(DefaultChannelHandlerContext.java:320) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:127) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:485) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:452) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:346) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:794) [netty-all-5.0.0.Alpha1.jar:5.0.0.Alpha1] at java.lang.Thread.run(Thread.java:748) [?:1.8.0_201] ``` 可能是个包冲突,但是不确定也没有找到是那个冲突,求助,帮看看

Netty channelActive 触发发送信息到客户端问题

public class WebSocketFrameHandler extends SimpleChannelInboundHandler<WebSocketFrame> { @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { new Thread(()-> ctx.channel().writeAndFlush(new TextWebSocketFrame("I am channel active"))).start(); } } 像上面那样写可以发送数据到客户端,但是 ctx.channel().writeAndFlush(new TextWebSocketFrame("I am server")); 这样写不可以发送,请问是什么问题? netty 版本: <dependency> <groupId>io.netty</groupId> <artifactId>netty-all</artifactId> <version>4.1.27.Final</version> </dependency>

netty ssl加密传输自动断开

netty的客户端和服务端已经完成ssl认证,传输数据4-5次以后自动断开,请各位帮忙看看哪有问题,下面是代码和异常 客户端: 1. 注册client的handler部分代码: SSLEngine sse = client_context.createSSLEngine(); sse.setUseClientMode(true); sc.pipeline().addLast(new ClientOutboundHandler()) .addLast(new SslHandler(sse)) // .addLast(new IdleStateHandler(30, 30, 0, // TimeUnit.SECONDS)) // .addLast(new HeartBeatHandler()) // .addLast(new ClientCodec()) .addLast(new ClientInboundHandler()) .addLast(new ClientLogicHandler()); 2. ClientInboundHandler中发送数据代码: @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) { if (evt instanceof SslHandshakeCompletionEvent) {// 测试ssl加密传输数据 logger.info("client's ssl connection was complated"); ClientSendHeartbeatTask csht = new ClientSendHeartbeatTask(ctx); Thread heartbeatThread = new Thread(csht); heartbeatThread.start(); ClientSendDataTask csdt = new ClientSendDataTask(ctx); Thread dataThread = new Thread(csdt); dataThread.start(); } } 3. 其中ClientSendDataTask的代码如下: @Override public void run() { while (true) { ByteBuf buff = Unpooled.buffer(); String str = JSONArray.fromObject(etlservice.getEtlTablePojos()).toString(); NioMessage nm = new NioMessage(); nm.setType(Constance.DEMODATA_MSG_TYPE); nm.setJson(str); String jsonStr = JacksonUtil.getJsonStr(nm); buff.writeInt(jsonStr.getBytes().length).writeBytes(jsonStr.getBytes()); ctx.writeAndFlush(buff); // buff.clear(); // buff.release(); logger.info("client ClientSendDataTask : send data msg to server"); try { Thread.sleep(10000); } catch (InterruptedException e) { e.printStackTrace(); } } } 4. server端注册handler代码: SSLEngine sse = server_context.createSSLEngine(); sse.setNeedClientAuth(true); sse.setUseClientMode(false); sc.pipeline().addLast(new ServerOutboundHandler()) .addLast(new SslHandler(sse)) // .addLast(new IdleStateHandler(30, 30, // 0,TimeUnit.SECONDS)) // .addLast(new HeartBeatHandler()) // .addLast(new ServerCodec()) .addLast(new ServerInboundHandler()) .addLast(new ServerLogicHandler()); 5. ServerInboundHandler中处理接收的数据代码: @Override public void channelRead(ChannelHandlerContext ctx, Object msg) { NioMessage nm = NettyUtil.getNioMessage(msg); if (0 == nm.getType()) logger.warn("server channelRead : msg type is zero , throw exception"); try { if (Constance.HEARTBEAT_MSG_TYPE == nm.getType()) { logger.info("server channelRead : heartbeat msg"); HeartBeatEntity hb = JacksonUtil.getObjMapper().readValue(nm.getJson(), HeartBeatEntity.class); ctx.fireChannelRead(hb); } else if (Constance.DEMODATA_MSG_TYPE == nm.getType()) { logger.info("server channelRead : data msg"); // List<EtlTablePojo> pojoList = (List<EtlTablePojo>) // JacksonUtil.getObjMapper().readValue(nm.getJson(), // List.class); System.out.println("=====json is : " + nm.getJson()); ctx.fireChannelRead(nm.getJson()); } else if (Constance.RESULT_MSG_TYPE == nm.getType()) { logger.info("server channelRead : operation result msg"); ResultEntity re = JacksonUtil.getObjMapper().readValue(nm.getJson(), ResultEntity.class); re.toString(); } } catch (JsonParseException e) { e.printStackTrace(); } catch (JsonMappingException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } 6. 运行一段时间后出现错误如下: [nioEventLoopGroup-14-1] DEBUG io.netty.handler.ssl.SslHandler - Swallowing a harmless 'connection reset by peer / broken pipe' error that occurred while writing close_notify in response to the peer's close_notify java.io.IOException: Connection reset by peer at sun.nio.ch.FileDispatcher.read0(Native Method) ~[na:1.6.0_65] at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21) ~[na:1.6.0_65] at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198) ~[na:1.6.0_65] at sun.nio.ch.IOUtil.read(IOUtil.java:166) ~[na:1.6.0_65] at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:245) ~[na:1.6.0_65] at io.netty.buffer.UnpooledUnsafeDirectByteBuf.setBytes(UnpooledUnsafeDirectByteBuf.java:436) ~[netty-all-4.0.11.Final.jar:na] at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:870) ~[netty-all-4.0.11.Final.jar:na] at io.netty.channel.socket.nio.NioSocketChannel.doReadBytes(NioSocketChannel.java:208) ~[netty-all-4.0.11.Final.jar:na] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:88) ~[netty-all-4.0.11.Final.jar:na] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:485) [netty-all-4.0.11.Final.jar:na] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:452) [netty-all-4.0.11.Final.jar:na] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:346) [netty-all-4.0.11.Final.jar:na] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:101) [netty-all-4.0.11.Final.jar:na] at java.lang.Thread.run(Thread.java:695) [na:1.6.0_65]

netty client server交互问题

Server与Client建立连接后,会执行以下的步骤: 1、Client向Server发送消息 2、Server接收客户端发送的消息,并解析,根据解析内容向客户端发送消息 3、客户端接收到服务端消息,解析,根据解析结果,再发给客户端。 知道出现结束标志。

在中国程序员是青春饭吗?

今年,我也32了 ,为了不给大家误导,咨询了猎头、圈内好友,以及年过35岁的几位老程序员……舍了老脸去揭人家伤疤……希望能给大家以帮助,记得帮我点赞哦。 目录: 你以为的人生 一次又一次的伤害 猎头界的真相 如何应对互联网行业的「中年危机」 一、你以为的人生 刚入行时,拿着傲人的工资,想着好好干,以为我们的人生是这样的: 等真到了那一天,你会发现,你的人生很可能是这样的: ...

程序员请照顾好自己,周末病魔差点一套带走我。

程序员在一个周末的时间,得了重病,差点当场去世,还好及时挽救回来了。

和黑客斗争的 6 天!

互联网公司工作,很难避免不和黑客们打交道,我呆过的两家互联网公司,几乎每月每天每分钟都有黑客在公司网站上扫描。有的是寻找 Sql 注入的缺口,有的是寻找线上服务器可能存在的漏洞,大部分都...

搜狗输入法也在挑战国人的智商!

故事总是一个接着一个到来...上周写完《鲁大师已经彻底沦为一款垃圾流氓软件!》这篇文章之后,鲁大师的市场工作人员就找到了我,希望把这篇文章删除掉。经过一番沟通我先把这篇文章从公号中删除了...

总结了 150 余个神奇网站,你不来瞅瞅吗?

原博客再更新,可能就没了,之后将持续更新本篇博客。

副业收入是我做程序媛的3倍,工作外的B面人生是怎样的?

提到“程序员”,多数人脑海里首先想到的大约是:为人木讷、薪水超高、工作枯燥…… 然而,当离开工作岗位,撕去层层标签,脱下“程序员”这身外套,有的人生动又有趣,马上展现出了完全不同的A/B面人生! 不论是简单的爱好,还是正经的副业,他们都干得同样出色。偶尔,还能和程序员的特质结合,产生奇妙的“化学反应”。 @Charlotte:平日素颜示人,周末美妆博主 大家都以为程序媛也个个不修边幅,但我们也许...

MySQL数据库面试题(2020最新版)

文章目录数据库基础知识为什么要使用数据库什么是SQL?什么是MySQL?数据库三大范式是什么mysql有关权限的表都有哪几个MySQL的binlog有有几种录入格式?分别有什么区别?数据类型mysql有哪些数据类型引擎MySQL存储引擎MyISAM与InnoDB区别MyISAM索引与InnoDB索引的区别?InnoDB引擎的4大特性存储引擎选择索引什么是索引?索引有哪些优缺点?索引使用场景(重点)...

如果你是老板,你会不会踢了这样的员工?

有个好朋友ZS,是技术总监,昨天问我:“有一个老下属,跟了我很多年,做事勤勤恳恳,主动性也很好。但随着公司的发展,他的进步速度,跟不上团队的步伐了,有点...

我入职阿里后,才知道原来简历这么写

私下里,有不少读者问我:“二哥,如何才能写出一份专业的技术简历呢?我总感觉自己写的简历太烂了,所以投了无数份,都石沉大海了。”说实话,我自己好多年没有写过简历了,但我认识的一个同行,他在阿里,给我说了一些他当年写简历的方法论,我感觉太牛逼了,实在是忍不住,就分享了出来,希望能够帮助到你。 01、简历的本质 作为简历的撰写者,你必须要搞清楚一点,简历的本质是什么,它就是为了来销售你的价值主张的。往深...

优雅的替换if-else语句

场景 日常开发,if-else语句写的不少吧??当逻辑分支非常多的时候,if-else套了一层又一层,虽然业务功能倒是实现了,但是看起来是真的很不优雅,尤其是对于我这种有强迫症的程序"猿",看到这么多if-else,脑袋瓜子就嗡嗡的,总想着解锁新姿势:干掉过多的if-else!!!本文将介绍三板斧手段: 优先判断条件,条件不满足的,逻辑及时中断返回; 采用策略模式+工厂模式; 结合注解,锦...

离职半年了,老东家又发 offer,回不回?

有小伙伴问松哥这个问题,他在上海某公司,在离职了几个月后,前公司的领导联系到他,希望他能够返聘回去,他很纠结要不要回去? 俗话说好马不吃回头草,但是这个小伙伴既然感到纠结了,我觉得至少说明了两个问题:1.曾经的公司还不错;2.现在的日子也不是很如意。否则应该就不会纠结了。 老实说,松哥之前也有过类似的经历,今天就来和小伙伴们聊聊回头草到底吃不吃。 首先一个基本观点,就是离职了也没必要和老东家弄的苦...

2020阿里全球数学大赛:3万名高手、4道题、2天2夜未交卷

阿里巴巴全球数学竞赛( Alibaba Global Mathematics Competition)由马云发起,由中国科学技术协会、阿里巴巴基金会、阿里巴巴达摩院共同举办。大赛不设报名门槛,全世界爱好数学的人都可参与,不论是否出身数学专业、是否投身数学研究。 2020年阿里巴巴达摩院邀请北京大学、剑桥大学、浙江大学等高校的顶尖数学教师组建了出题组。中科院院士、美国艺术与科学院院士、北京国际数学...

男生更看重女生的身材脸蛋,还是思想?

往往,我们看不进去大段大段的逻辑。深刻的哲理,往往短而精悍,一阵见血。问:产品经理挺漂亮的,有点心动,但不知道合不合得来。男生更看重女生的身材脸蛋,还是...

程序员为什么千万不要瞎努力?

本文作者用对比非常鲜明的两个开发团队的故事,讲解了敏捷开发之道 —— 如果你的团队缺乏统一标准的环境,那么即使勤劳努力,不仅会极其耗时而且成果甚微,使用...

为什么程序员做外包会被瞧不起?

二哥,有个事想询问下您的意见,您觉得应届生值得去外包吗?公司虽然挺大的,中xx,但待遇感觉挺低,马上要报到,挺纠结的。

当HR压你价,说你只值7K,你该怎么回答?

当HR压你价,说你只值7K时,你可以流畅地回答,记住,是流畅,不能犹豫。 礼貌地说:“7K是吗?了解了。嗯~其实我对贵司的面试官印象很好。只不过,现在我的手头上已经有一份11K的offer。来面试,主要也是自己对贵司挺有兴趣的,所以过来看看……”(未完) 这段话主要是陪HR互诈的同时,从公司兴趣,公司职员印象上,都给予对方正面的肯定,既能提升HR的好感度,又能让谈判气氛融洽,为后面的发挥留足空间。...

面试:第十六章:Java中级开发(16k)

HashMap底层实现原理,红黑树,B+树,B树的结构原理 Spring的AOP和IOC是什么?它们常见的使用场景有哪些?Spring事务,事务的属性,传播行为,数据库隔离级别 Spring和SpringMVC,MyBatis以及SpringBoot的注解分别有哪些?SpringMVC的工作原理,SpringBoot框架的优点,MyBatis框架的优点 SpringCould组件有哪些,他们...

面试阿里p7,被按在地上摩擦,鬼知道我经历了什么?

面试阿里p7被问到的问题(当时我只知道第一个):@Conditional是做什么的?@Conditional多个条件是什么逻辑关系?条件判断在什么时候执...

面试了一个 31 岁程序员,让我有所触动,30岁以上的程序员该何去何从?

最近面试了一个31岁8年经验的程序猿,让我有点感慨,大龄程序猿该何去何从。

大三实习生,字节跳动面经分享,已拿Offer

说实话,自己的算法,我一个不会,太难了吧

程序员垃圾简历长什么样?

已经连续五年参加大厂校招、社招的技术面试工作,简历看的不下于万份 这篇文章会用实例告诉你,什么是差的程序员简历! 疫情快要结束了,各个公司也都开始春招了,作为即将红遍大江南北的新晋UP主,那当然要为小伙伴们做点事(手动狗头)。 就在公众号里公开征简历,义务帮大家看,并一一点评。《启舰:春招在即,义务帮大家看看简历吧》 一石激起千层浪,三天收到两百多封简历。 花光了两个星期的所有空闲时...

《Oracle Java SE编程自学与面试指南》最佳学习路线图2020年最新版(进大厂必备)

正确选择比瞎努力更重要!

《Oracle Java SE编程自学与面试指南》最佳学习路线图(2020最新版)

正确选择比瞎努力更重要!

都前后端分离了,咱就别做页面跳转了!统统 JSON 交互

文章目录1. 无状态登录1.1 什么是有状态1.2 什么是无状态1.3 如何实现无状态1.4 各自优缺点2. 登录交互2.1 前后端分离的数据交互2.2 登录成功2.3 登录失败3. 未认证处理方案4. 注销登录 这是本系列的第四篇,有小伙伴找不到之前文章,松哥给大家列一个索引出来: 挖一个大坑,Spring Security 开搞! 松哥手把手带你入门 Spring Security,别再问密...

字节跳动面试官竟然问了我JDBC?

轻松等回家通知

面试官:你连SSO都不懂,就别来面试了

大厂竟然要考我SSO,卧槽。

阿里面试官让我用Zk(Zookeeper)实现分布式锁

他可能没想到,我当场手写出来了

终于,月薪过5万了!

来看几个问题想不想月薪超过5万?想不想进入公司架构组?想不想成为项目组的负责人?想不想成为spring的高手,超越99%的对手?那么本文内容是你必须要掌握的。本文主要详解bean的生命...

自从喜欢上了B站这12个UP主,我越来越觉得自己是个废柴了!

不怕告诉你,我自从喜欢上了这12个UP主,哔哩哔哩成为了我手机上最耗电的软件,几乎每天都会看,可是吧,看的越多,我就越觉得自己是个废柴,唉,老天不公啊,不信你看看…… 间接性踌躇满志,持续性混吃等死,都是因为你们……但是,自己的学习力在慢慢变强,这是不容忽视的,推荐给你们! 都说B站是个宝,可是有人不会挖啊,没事,今天咱挖好的送你一箩筐,首先啊,我在B站上最喜欢看这个家伙的视频了,为啥 ,咱撇...

代码注释如此沙雕,会玩还是你们程序员!

某站后端代码被“开源”,同时刷遍全网的,还有代码里的那些神注释。 我们这才知道,原来程序员个个都是段子手;这么多年来,我们也走过了他们的无数套路… 首先,产品经理,是永远永远吐槽不完的!网友的评论也非常扎心,说看这些代码就像在阅读程序员的日记,每一页都写满了对产品经理的恨。 然后,也要发出直击灵魂的质问:你是尊贵的付费大会员吗? 这不禁让人想起之前某音乐app的穷逼Vip,果然,穷逼在哪里都是...

立即提问
相关内容推荐