I'm using JDK 17, and when I start a job built against Flink 1.17 it fails with an error at startup.

Here is my code:
package org.dromara.study;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/**
 * @Description DataStream implementation of word count
 * @Author xyq
 * @Date 2025/1/12 17:57
 */
public class WordCountDataStream {
    public static void main(String[] args) {
        // 1. Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 2. Read the input data
        DataStreamSource<String> lineDs = env.readTextFile("D:\\workSpace\\SDYT_NEW\\security-operation-plus\\doc\\word.txt");
        // 3. Process the data: flat-map each line with FlatMapImpl
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOneDs = lineDs.flatMap(new FlatMapImpl());
        // 4. Key the stream by word
        KeyedStream<Tuple2<String, Integer>, String> wordAndOneKs = wordAndOneDs.keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
            @Override
            public String getKey(Tuple2<String, Integer> value) throws Exception {
                return value.f0;
            }
        });
        // 5. Aggregate: sum the counts per key
        SingleOutputStreamOperator<Tuple2<String, Integer>> sumDs = wordAndOneKs.sum(1);
        // 6. Print the results
        sumDs.print();
        // 7. Execute: similar to calling ssc.start() at the end of a Spark Streaming job
        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
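
For reference, FlatMapImpl is not included in the listing above. It is assumed here to be a plain FlatMapFunction that splits each line on whitespace and emits a (word, 1) tuple per word; the actual class may differ. A minimal sketch under that assumption:

package org.dromara.study;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Hypothetical sketch of FlatMapImpl (the real class is not shown in the question):
 * splits each input line on whitespace and emits a (word, 1) tuple per word.
 */
public class FlatMapImpl implements FlatMapFunction<String, Tuple2<String, Integer>> {
    @Override
    public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
        for (String word : line.split("\\s+")) {
            if (!word.isEmpty()) {
                out.collect(Tuple2.of(word, 1));
            }
        }
    }
}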