问题遇到的现象和发生背景
一个关于flinksql的开窗函数的问题
遇到的现象和发生背景如下(第一条报错信息见下文"运行结果及详细报错内容"部分)。
我希望像hive一样,做一个开窗函数,排序是没问题的,但是sum就不支持
以下 SQL 均以代码块形式给出。
这是我的建表语句
CREATE TABLE `my_hive`.`dl_ods`.`oms_test001` (
is_delete VARCHAR(2147483647)
,oms_order_id VARCHAR(2147483647)
,channel_order_id VARCHAR(2147483647)
,third_order_id VARCHAR(2147483647)
,user_id VARCHAR(2147483647)
,business_id VARCHAR(2147483647)
,store_id VARCHAR(2147483647)
,order_status VARCHAR(2147483647)
,order_status_cur VARCHAR(2147483647)
,order_status_platform VARCHAR(2147483647)
,channel VARCHAR(2147483647)
,order_create_time VARCHAR(2147483647)
,order_update_time VARCHAR(2147483647)
,order_pay_time VARCHAR(2147483647)
,order_pre_start_delivery_time VARCHAR(2147483647)
,order_pre_end_delivery_time VARCHAR(2147483647)
,order_cancel_time VARCHAR(2147483647)
,buyer_full_name VARCHAR(2147483647)
,buyer_full_address VARCHAR(2147483647)
,buyer_telephone VARCHAR(2147483647)
,buyer_mobile VARCHAR(2147483647)
,delivery_carrier_no VARCHAR(2147483647)
,delivery_carrier_name VARCHAR(2147483647)
,delivery_bill_no VARCHAR(2147483647)
,complete_time VARCHAR(2147483647)
,pay_channel VARCHAR(2147483647)
,order_total_money VARCHAR(2147483647)
,order_actual_money VARCHAR(2147483647)
,order_coupon_amount VARCHAR(2147483647)
,order_coupon_type VARCHAR(2147483647)
,buyer_lng VARCHAR(2147483647)
,buyer_lat VARCHAR(2147483647)
,coordinate_type VARCHAR(2147483647)
,buyer_province VARCHAR(2147483647)
,buyer_city VARCHAR(2147483647)
,buyer_area VARCHAR(2147483647)
,buyer_address VARCHAR(2147483647)
,order_buyer_remark VARCHAR(2147483647)
,invoice_type VARCHAR(2147483647)
,invoice_title VARCHAR(2147483647)
,invoice_duty_no VARCHAR(2147483647)
,invoice_mail VARCHAR(2147483647)
,invoice_title_type VARCHAR(2147483647)
,invoice_desc VARCHAR(2147483647)
,invoice_content VARCHAR(2147483647)
,attribue VARCHAR(2147483647)
,order_refund_amount VARCHAR(2147483647)
,order_refund_reason VARCHAR(2147483647)
,order_refund_time VARCHAR(2147483647)
,version VARCHAR(2147483647)
,status VARCHAR(2147483647)
,gmt_create VARCHAR(2147483647)
,gmt_update VARCHAR(2147483647)
,merchant_code VARCHAR(2147483647)
,express_fee VARCHAR(2147483647)
,package_fee VARCHAR(2147483647)
,express_coupon_amount VARCHAR(2147483647)
,store_name VARCHAR(2147483647)
,is_self_pick VARCHAR(2147483647)
,invoice_money VARCHAR(2147483647)
,delivery_type VARCHAR(2147483647)
,is_collage VARCHAR(2147483647)
,collage_status VARCHAR(2147483647)
,collage_complete_time VARCHAR(2147483647)
,store_phone VARCHAR(2147483647)
,store_address VARCHAR(2147483647)
,pos_order_id VARCHAR(2147483647)
,promotion_discounts VARCHAR(2147483647)
,channel_third_order_id VARCHAR(2147483647)
,is_split VARCHAR(2147483647)
,order_type VARCHAR(2147483647)
,buyer_town VARCHAR(2147483647)
,order_goods_type VARCHAR(2147483647)
,buyer_nick VARCHAR(2147483647)
,buyer_message VARCHAR(2147483647)
,seller_memo VARCHAR(2147483647)
,delivery_time VARCHAR(2147483647)
,pay_product_type VARCHAR(2147483647)
,is_self_delivery VARCHAR(2147483647)
,third_express_fee VARCHAR(2147483647)
,is_prescription VARCHAR(2147483647)
,pres_audit_status VARCHAR(2147483647)
,pos_trans_time VARCHAR(2147483647)
,pos_refund_time VARCHAR(2147483647)
,refund_type VARCHAR(2147483647)
,origin_oms_order_id VARCHAR(2147483647)
,refund_sort VARCHAR(2147483647)
,refunded_amount VARCHAR(2147483647)
,zdt_user_id VARCHAR(2147483647)
,zdt_user_name VARCHAR(2147483647)
,cashier_no VARCHAR(2147483647)
,cashier_name VARCHAR(2147483647)
,is_print_ticket VARCHAR(2147483647)
,delivery_mode VARCHAR(2147483647)
,advance_booking_type VARCHAR(2147483647)
,advance_booking_amount VARCHAR(2147483647)
,advance_send_time VARCHAR(2147483647)
,is_frozen VARCHAR(2147483647)
,order_sign VARCHAR(2147483647)
,delivery_carrier_code VARCHAR(2147483647)
,manual_confirmation_type VARCHAR(2147483647)
,manual_confirmation_type_desc VARCHAR(2147483647)
,exceptoin_type VARCHAR(2147483647)
,exceptoin_type_desc VARCHAR(2147483647)
,real_express_bill_no VARCHAR(2147483647)
,member_no VARCHAR(2147483647)
,channel_store_id VARCHAR(2147483647)
,express_type VARCHAR(2147483647)
,proctime AS PROCTIME()
,ts AS TO_TIMESTAMP(gmt_create)
)WITH (
'properties.auto.commit.interval.ms' = '1000',
'canal-json.ignore-parse-errors' = 'true',
'format' = 'canal-json',
'properties.bootstrap.servers' = 'xxxx',
'connector' = 'kafka',
'topic' = 'xxxx',
'scan.startup.mode' = 'group-offsets',
'properties.auto.offset.reset.strategy' = 'earliest',
'properties.group.id' = 'oms_test001',
'properties.enable.auto.commit' = 'true'
)
;
这是我的sql
select *
from
(
select
store_id
,oms_order_id
,ROW_NUMBER() OVER (PARTITION BY oms_order_id order by gmt_create desc) as row_num
from
my_hive.dl_ods.oms_test001
) t
where row_num = 1
;
select *
from
(
select
store_id
,oms_order_id
,sum(cast(express_fee as decimal(32,2))) OVER (PARTITION BY oms_order_id order by gmt_create desc) as express_fee
from
my_hive.dl_ods.oms_test001
) t
where express_fee > 1
;
select *
from
(
select
store_id
,oms_order_id
,sum(cast(express_fee as decimal(32,2))) OVER (PARTITION BY oms_order_id order by gmt_create RANGE BETWEEN INTERVAL '1' HOUR PRECEDING AND CURRENT ROW) as express_fee
from
my_hive.dl_ods.oms_test001
) t
where express_fee > 1
;
运行结果及详细报错内容



我的解答思路和尝试过的方法:
从报错上来看,是因为我的数据源支持删除操作导致的。
我想要达到的结果:

我的疑问是,像我的数据源这种支持删除操作,又不断写入数据的表是不是属于流表?该怎么称呼?
我查了官网,flink是支持sum() over() 这种操作的,关于flink的窗口函数在什么条件下是支持的?什么条件下是不支持的?我这个表要想支持这个操作该怎么转换?