INSERT ... SELECT statement fails with error code 1105: runtime error: invalid memory address or nil pointer dereference

Bug Report
Please describe the problem you found clearly and accurately; any steps that might reproduce it will help the development team handle the issue promptly.
[Impact of the Bug]
TiDB 5.2.2
[Possible Steps to Reproduce]
The SELECT statement itself runs quickly, and the result set is only a few dozen rows.
INSERT INTO Material_Rank_List_Daily (
Platform_Type,
material_type,
Img_Path,
Material_Cover,
Material_Name,
Spec_Material_Id,
Material_Id,
Agent_Material_Id,
upload_User_Id,
upload_User_Name,
design_user,
Customer_Id,
Customer_Name,
Industry_Id,
Industry_Name,
material_tag,
totalCost,
cost,
impression,
click,
conversion,
conversion_cost,
conversion_rate,
ctr,
video_play_count,
video_outer_play_count,
video_outer_play100_count,
video_play_rate,
video_play_over_rate,
video_avg_play_time,
Modify_Time,
Create_Time
) SELECT
temp.Platform_Type,
temp.Material_Type,
temp.Img_Path,
NULL,
temp.material_name,
temp.Spec_Material_Id,
temp.Material_Id,
temp.Agent_Material_Id,
temp.Upload_User_Id,
eus.display_name,
temp.design_user,
temp.Customer_Id,
temp.Customer_Name,
temp.Industry_Id,
temp.Industry_Name,
GROUP_CONCAT(
DISTINCT cmt.tag_name SEPARATOR '|'
) AS material_tag,
0,
temp.cost,
temp.impression,
temp.click,
temp.conversion,
temp.conversion_cost,
temp.conversion_rate,
temp.ctr,
temp.video_play_count,
temp.video_outer_play_count,
temp.video_outer_play100_count,
temp.video_play_rate,
temp.video_play_over_rate,
temp.video_avg_play_time,
NOW(),
NOW()
FROM
(
SELECT
t2.*
FROM
(
SELECT
*, RANK() OVER (
PARTITION BY Industry_Id
ORDER BY
cost DESC
) AS rk
FROM
(
SELECT
Platform_Type,
Material_Type,
Spec_Material_Id,
Material_Id,
agent_material_id,
material_name,
MAX(Img_Path) AS Img_Path,
GROUP_CONCAT(
DISTINCT Role_Name,
':',
User_Name
ORDER BY
Role_Name SEPARATOR ';'
) AS Design_User,
customer_id,
GROUP_CONCAT(
DISTINCT "(",
customer_id,
')',
customer_name
ORDER BY
customer_id SEPARATOR '、'
) AS customer_name,
Industry_Id,
industry_name,
User_Id AS Upload_User_Id,
SUM(cost) AS cost,
SUM(impression) AS impression,
SUM(click) AS click,
SUM(conversion) AS conversion,
IFNULL(
SUM(cost) / SUM(conversion),
0
) AS conversion_cost,
IFNULL(
SUM(conversion) / SUM(click),
0
) AS conversion_rate,
IFNULL(
SUM(click) / SUM(impression),
0
) AS ctr,
SUM(video_play_count) AS video_play_count,
SUM(video_outer_play_count) AS video_outer_play_count,
SUM(video_outer_play100_count) AS video_outer_play100_count,
IFNULL(
SUM(video_play_count) / SUM(impression),
0
) AS video_play_rate,
IFNULL(
video_outer_play100_count / video_play_count,
0
) AS video_play_over_rate,
video_avg_play_time
FROM
Material_Participant_Daily_Report
WHERE
Report_Date = '2021-12-14'
AND cost > 0
AND Industry_Id <> 0
AND material_type = 0
GROUP BY
IFNULL(
Spec_Material_Id,
Agent_Material_Id
),
Industry_Id
) t1
) t2
WHERE
rk <= 10
) temp
LEFT JOIN mbg_business.creative_material_tag_info cmti ON temp.Spec_Material_Id = cmti.Spec_Material_Id
LEFT JOIN mbg_business.creative_material_tag cmt ON cmti.material_tag_id = cmt.id
LEFT JOIN mbg_core.emarbox_user eus ON temp.Upload_User_Id = eus.user_id
GROUP BY
IFNULL(
Spec_Material_Id,
Agent_Material_Id
)
[Unexpected Behavior Observed]
[2021/12/14 11:24:17.646 +08:00] [INFO] [conn.go:1007] [“command dispatched failed”] [conn=4580375] [connInfo=“id:4580375, addr:123.59.17.29:58626 status:10, collation:latin1_swedish_ci, user:dspwrite”] [command=Query] [status=“inTxn:0, autocommit:1”] [sql="INSERT INTO Material_Rank_List_Daily (\r\ \tPlatform_Type,\r\ \tmaterial_type,\r\ \tImg_Path,\r\ \tMaterial_Cover,\r\ \tMaterial_Name,\r\ \tSpec_Material_Id,\r\ \tMaterial_Id,\r\ \tAgent_Material_Id,\r\ \tupload_User_Id,\r\ \tupload_User_Name,\r\ \tdesign_user,\r\ \tCustomer_Id,\r\ \tCustomer_Name,\r\ \tIndustry_Id,\r\ \tIndustry_Name,\r\ \tmaterial_tag,\r\ \ttotalCost,\r\ \tcost,\r\ \timpression,\r\ \tclick,\r\ \tconversion,\r\ \tconversion_cost,\r\ \tconversion_rate,\r\ \tctr,\r\ \tvideo_play_count,\r\ \tvideo_outer_play_count,\r\ \tvideo_outer_play100_count,\r\ \tvideo_play_rate,\r\ \tvideo_play_over_rate,\r\ \tvideo_avg_play_time,\r\ \tModify_Time,\r\ \tCreate_Time\r\ ) SELECT\r\ \ttemp.Platform_Type,\r\ \ttemp.Material_Type,\r\ \ttemp.Img_Path,\r\ \tNULL,\r\ \ttemp.material_name,\r\ \ttemp.Spec_Material_Id,\r\ \ttemp.Material_Id,\r\ \ttemp.Agent_Material_Id,\r\ \ttemp.Upload_User_Id,\r\ \teus.display_name,\r\ \ttemp.design_user,\r\ \ttemp.Customer_Id,\r\ \ttemp.Customer_Name,\r\ \ttemp.Industry_Id,\r\ \ttemp.Industry_Name,\r\ \tGROUP_CONCAT(\r\ \t\tDISTINCT cmt.tag_name SEPARATOR ‘|’\r\ \t) AS material_tag,\r\ \t0,\r\ \ttemp.cost,\r\ \ttemp.impression,\r\ \ttemp.click,\r\ \ttemp.conversion,\r\ \ttemp.conversion_cost,\r\ \ttemp.conversion_rate,\r\ \ttemp.ctr,\r\ \ttemp.video_play_count,\r\ \ttemp.video_outer_play_count,\r\ \ttemp.video_outer_play100_count,\r\ \ttemp.video_play_rate,\r\ \ttemp.video_play_over_rate,\r\ \ttemp.video_avg_play_time,\r\ \tNOW(),\r\ \tNOW()\r\ FROM\r\ \t(\r\ \t\tSELECT\r\ \t\t\tt2.
\r\ \t\tFROM\r\ \t\t\t(\r\ \t\t\t\tSELECT\r\ \t\t\t\t\t*, rank () over (\r\ \t\t\t\t\t\tPARTITION BY Industry_Id\r\ \t\t\t\t\t\tORDER BY\r\ \t\t\t\t\t\t\tcost DESC\r\ \t\t\t\t\t) AS rk\r\ \t\t\t\tFROM\r\ \t\t\t\t\t(\r\ \t\t\t\t\t\tSELECT\r\ \t\t\t\t\t\t\tPlatform_Type,\r\ \t\t\t\t\t\t\tMaterial_Type,\r\ \t\t\t\t\t\t\tSpec_Material_Id,\r\ \t\t\t\t\t\t\tMaterial_Id,\r\ \t\t\t\t\t\t\tagent_material_id,\r\ \t\t\t\t\t\t\tmaterial_name,\r\ \t\t\t\t\t\t\tmax(Img_Path) AS Img_Path,\r\ \t\t\t\t\t\t\tGROUP_CONCAT(\r\ \t\t\t\t\t\t\t\tDISTINCT Role_Name,\r\ \t\t\t\t\t\t\t\t’:’,\r\ \t\t\t\t\t\t\t\tUser_Name\r\ \t\t\t\t\t\t\tORDER BY\r\ \t\t\t\t\t\t\t\tRole_Name SEPARATOR ‘;’\r\ \t\t\t\t\t\t\t) AS Design_User,\r\ \t\t\t\t\t\t\tcustomer_id,\r\ \t\t\t\t\t\t\tGROUP_CONCAT(\r\ \t\t\t\t\t\t\t\tDISTINCT “(”,\r\ \t\t\t\t\t\t\t\tcustomer_id,\r\ \t\t\t\t\t\t\t\t’)’,\r\ \t\t\t\t\t\t\t\tcustomer_name\r\ \t\t\t\t\t\t\tORDER BY\r\ \t\t\t\t\t\t\t\tcustomer_id SEPARATOR ‘、’\r\ \t\t\t\t\t\t\t) AS customer_name,\r\ \t\t\t\t\t\t\tIndustry_Id,\r\ \t\t\t\t\t\t\tindustry_name,\r\ \t\t\t\t\t\t\tUser_Id AS Upload_User_Id,\r\ \t\t\t\t\t\t\tsum(cost) AS cost,\r\ \t\t\t\t\t\t\tsum(impression) AS impression,\r\ \t\t\t\t\t\t\tsum(click) AS click,\r\ \t\t\t\t\t\t\tsum(conversion) AS conversion,\r\ \t\t\t\t\t\t\tifnull(\r\ \t\t\t\t\t\t\t\tSUM(cost) / SUM(conversion),\r\ \t\t\t\t\t\t\t\t0\r\ \t\t\t\t\t\t\t) AS conversion_cost,\r\ \t\t\t\t\t\t\tifnull(\r\ \t\t\t\t\t\t\t\tSUM(conversion) / SUM(click),\r\ \t\t\t\t\t\t\t\t0\r\ \t\t\t\t\t\t\t) AS conversion_rate,\r\ \t\t\t\t\t\t\tifnull(\r\ \t\t\t\t\t\t\t\tSUM(click) / SUM(impression),\r\ \t\t\t\t\t\t\t\t0\r\ \t\t\t\t\t\t\t) AS ctr,\r\ \t\t\t\t\t\t\tSUM(video_play_count) AS video_play_count,\r\ \t\t\t\t\t\t\tSUM(video_outer_play_count) AS video_outer_play_count,\r\ \t\t\t\t\t\t\tsum(video_outer_play100_count) AS video_outer_play100_count,\r\ \t\t\t\t\t\t\tifnull(\r\ \t\t\t\t\t\t\t\tSUM(video_play_count) / SUM(impression),\r\ \t\t\t\t\t\t\t\t0\r\ \t\t\t\t\t\t\t) AS video_play_rate,\r\ \t\t\t\t\t\t\tifnull(\r\ \t\t\t\t\t\t\t\tvideo_outer_play100_count / video_play_count,\r\ \t\t\t\t\t\t\t\t0\r\ \t\t\t\t\t\t\t) AS video_play_over_rate,\r\ \t\t\t\t\t\t\tvideo_avg_play_time\r\ \t\t\t\t\t\tFROM\r\ \t\t\t\t\t\t\tMaterial_Participant_Daily_Report\r\ \t\t\t\t\t\tWHERE\r\ \t\t\t\t\t\t\tReport_Date = ‘2021-12-14’\r\ \t\t\t\t\t\tAND cost > 0\r\ \t\t\t\t\t\tAND Industry_Id <> 0\r\ \t\t\t\t\t\tAND material_type = 0\r\ \t\t\t\t\t\tGROUP BY\r\ \t\t\t\t\t\t\tifnull(\r\ \t\t\t\t\t\t\t\tSpec_Material_Id,\r\ \t\t\t\t\t\t\t\tAgent_Material_Id\r\ \t\t\t\t\t\t\t),\r\ \t\t\t\t\t\t\tIndustry_Id\r\ \t\t\t\t\t) t1\r\ \t\t\t) t2\r\ \t\tWHERE\r\ \t\t\trk <= 10\r\ \t) temp\r\ LEFT JOIN mbg_business.creative_material_tag_info cmti ON temp.Spec_Material_Id = cmti.Spec_Material_Id\r\ LEFT JOIN mbg_business.creative_material_tag cmt ON cmti.material_tag_id = cmt.id\r\ LEFT JOIN mbg_core.emarbox_user eus ON temp.Upload_User_Id = eus.user_id\r\ GROUP BY\r\ \tifnull(\r\ \t\tSpec_Material_Id,\r\ \t\tAgent_Material_Id\r\ \t)"] [txn_mode=PESSIMISTIC] [err=“runtime error: invalid memory address or nil pointer dereference\ngithub.com/pingcap/tidb/executor.(*IndexNestedLoopHashJoin).finishJoinWorkers\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/index_lookup_hash_join.go:203\ngithub.com/pingcap/tidb/util.WithRecovery.func1\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/util/misc.go:91\ runtime.gopanic\ 
\t/usr/local/go/src/runtime/panic.go:965\ runtime.panicmem\ \t/usr/local/go/src/runtime/panic.go:212\ runtime.sigpanic\ \t/usr/local/go/src/runtime/signal_unix.go:734\ github.com/pingcap/tidb/util/chunk.(*Chunk).NumRows\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/util/chunk/chunk.go:349\ngithub.com/pingcap/tidb/executor.(*vecGroupChecker).splitIntoGroups\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/aggregate.go:1456\ngithub.com/pingcap/tidb/executor.(*PipelinedWindowExec).getRowsInPartition\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/pipelined_window.go:188\ngithub.com/pingcap/tidb/executor.(*PipelinedWindowExec).Next\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/pipelined_window.go:124\ngithub.com/pingcap/tidb/executor.Next\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/executor.go:285\ github.com/pingcap/tidb/executor.(*SelectionExec).Next\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/executor.go:1301\ngithub.com/pingcap/tidb/executor.Next\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/executor.go:285\ github.com/pingcap/tidb/executor.(*outerWorker).buildTask\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/index_lookup_join.go:402\ngithub.com/pingcap/tidb/executor.(*indexHashJoinOuterWorker).buildTask\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/index_lookup_hash_join.go:359\ngithub.com/pingcap/tidb/executor.(*indexHashJoinOuterWorker).run\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/index_lookup_hash_join.go:328\ngithub.com/pingcap/tidb/executor.(*IndexNestedLoopHashJoin).startWorkers.func1\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/executor/index_lookup_hash_join.go:171\ngithub.com/pingcap/tidb/util.WithRecovery\ \t/home/jenkins/agent/workspace/optimization-build-tidb-linux-amd/go/src/github.com/pingcap/tidb/util/misc.go:99\ runtime.goexit\ \t/usr/local/go/src/runtime/asm_amd64.s:1371”]
[Expected Behavior]

[Related Components and Their Versions]

[Other Background Information or Screenshots]
For example: cluster topology, OS and kernel versions, application info. If the problem is SQL-related, please provide the SQL statements and the schema of the related tables; if there are critical errors in node logs, please provide the relevant log contents or files; if some business-sensitive information is inconvenient to share, please leave contact information and we will follow up with you privately.

Please post the execution plan of the SELECT statement.

id estRows task access object operator info
Projection_20 1.00 root yixintui_operate.material_participant_daily_report.platform_type, yixintui_operate.material_participant_daily_report.material_type, Column#52, ->Column#112, yixintui_operate.material_participant_daily_report.material_name, yixintui_operate.material_participant_daily_report.spec_material_id, yixintui_operate.material_participant_daily_report.material_id, yixintui_operate.material_participant_daily_report.agent_material_id, yixintui_operate.material_participant_daily_report.user_id, mbg_core.emarbox_user.display_name, Column#53, yixintui_operate.material_participant_daily_report.customer_id, Column#54, yixintui_operate.material_participant_daily_report.industry_id, yixintui_operate.material_participant_daily_report.industry_name, Column#111, 0->Column#113, Column#55, Column#56, Column#57, Column#58, Column#63, Column#64, Column#59, Column#60, Column#61, Column#65, Column#66, yixintui_operate.material_participant_daily_report.video_avg_play_time, 2021-12-14 15:11:33->Column#114, 2021-12-14 15:11:33->Column#115
└─HashAgg_21 1.00 root group by:Column#206, funcs:group_concat(distinct Column#179 separator "
└─Projection_99 1.71 root mbg_business.creative_material_tag.tag_name, yixintui_operate.material_participant_daily_report.platform_type, yixintui_operate.material_participant_daily_report.material_type, yixintui_operate.material_participant_daily_report.spec_material_id, yixintui_operate.material_participant_daily_report.material_id, yixintui_operate.material_participant_daily_report.agent_material_id, yixintui_operate.material_participant_daily_report.material_name, Column#52, Column#53, yixintui_operate.material_participant_daily_report.customer_id, Column#54, yixintui_operate.material_participant_daily_report.industry_id, yixintui_operate.material_participant_daily_report.industry_name, yixintui_operate.material_participant_daily_report.user_id, Column#55, Column#56, Column#57, Column#58, Column#63, Column#64, Column#59, Column#60, Column#61, Column#65, Column#66, yixintui_operate.material_participant_daily_report.video_avg_play_time, mbg_core.emarbox_user.display_name, ifnull(cast(yixintui_operate.material_participant_daily_report.spec_material_id, var_string(20)), yixintui_operate.material_participant_daily_report.agent_material_id)->Column#206
└─HashJoin_35 1.71 root left outer join, equal:[eq(yixintui_operate.material_participant_daily_report.user_id, mbg_core.emarbox_user.user_id)]
├─HashJoin_46(Build) 1.71 root left outer join, equal:[eq(mbg_business.creative_material_tag_info.material_tag_id, mbg_business.creative_material_tag.id)]
│ ├─IndexHashJoin_53(Build) 1.37 root left outer join, inner:IndexLookUp_50, outer key:yixintui_operate.material_participant_daily_report.spec_material_id, inner key:mbg_business.creative_material_tag_info.spec_material_id, equal cond:eq(yixintui_operate.material_participant_daily_report.spec_material_id, mbg_business.creative_material_tag_info.spec_material_id)
│ │ ├─Selection_63(Build) 0.80 root le(Column#68, 10)
│ │ │ └─Window_64 1.00 root rank()->Column#68 over(partition by yixintui_operate.material_participant_daily_report.industry_id order by Column#55 desc)
│ │ │ └─Sort_80 1.00 root yixintui_operate.material_participant_daily_report.industry_id, Column#55:desc
│ │ │ └─Projection_66 1.00 root yixintui_operate.material_participant_daily_report.platform_type, yixintui_operate.material_participant_daily_report.material_type, yixintui_operate.material_participant_daily_report.spec_material_id, yixintui_operate.material_participant_daily_report.material_id, yixintui_operate.material_participant_daily_report.agent_material_id, yixintui_operate.material_participant_daily_report.material_name, Column#52, Column#53, yixintui_operate.material_participant_daily_report.customer_id, Column#54, yixintui_operate.material_participant_daily_report.industry_id, yixintui_operate.material_participant_daily_report.industry_name, yixintui_operate.material_participant_daily_report.user_id, Column#55, Column#56, Column#57, Column#58, ifnull(div(Column#58, Column#57), 0)->Column#63, ifnull(div(Column#57, Column#56), 0)->Column#64, Column#59, Column#60, Column#61, ifnull(div(cast(Column#59, double BINARY), Column#56), 0)->Column#65, ifnull(div(cast(yixintui_operate.material_participant_daily_report.video_outer_play100_count, decimal(20,0) BINARY), cast(yixintui_operate.material_participant_daily_report.video_play_count, decimal(20,0) BINARY)), 0)->Column#66, yixintui_operate.material_participant_daily_report.video_avg_play_time
│ │ │ └─HashAgg_67 1.00 root group by:Column#177, Column#178, funcs:max(Column#150)->Column#52, funcs:group_concat(distinct Column#151, “:”, Column#152 order by Column#153 separator “;”)->Column#53, funcs:group_concat(distinct “(”, Column#154, “)”, Column#155 order by Column#156 separator “、”)->Column#54, funcs:sum(Column#157)->Column#55, funcs:sum(Column#158)->Column#56, funcs:sum(Column#159)->Column#57, funcs:sum(Column#160)->Column#58, funcs:sum(Column#161)->Column#59, funcs:sum(Column#162)->Column#60, funcs:sum(Column#163)->Column#61, funcs:firstrow(Column#164)->yixintui_operate.material_participant_daily_report.material_type, funcs:firstrow(Column#165)->yixintui_operate.material_participant_daily_report.material_name, funcs:firstrow(Column#166)->yixintui_operate.material_participant_daily_report.spec_material_id, funcs:firstrow(Column#167)->yixintui_operate.material_participant_daily_report.material_id, funcs:firstrow(Column#168)->yixintui_operate.material_participant_daily_report.agent_material_id, funcs:firstrow(Column#169)->yixintui_operate.material_participant_daily_report.user_id, funcs:firstrow(Column#170)->yixintui_operate.material_participant_daily_report.platform_type, funcs:firstrow(Column#171)->yixintui_operate.material_participant_daily_report.customer_id, funcs:firstrow(Column#172)->yixintui_operate.material_participant_daily_report.industry_id, funcs:firstrow(Column#173)->yixintui_operate.material_participant_daily_report.industry_name, funcs:firstrow(Column#174)->yixintui_operate.material_participant_daily_report.video_play_count, funcs:firstrow(Column#175)->yixintui_operate.material_participant_daily_report.video_outer_play100_count, funcs:firstrow(Column#176)->yixintui_operate.material_participant_daily_report.video_avg_play_time
│ │ │ └─Projection_98 0.02 root yixintui_operate.material_participant_daily_report.img_path, yixintui_operate.material_participant_daily_report.role_name, yixintui_operate.material_participant_daily_report.user_name, yixintui_operate.material_participant_daily_report.role_name, cast(yixintui_operate.material_participant_daily_report.customer_id, var_string(20))->Column#154, yixintui_operate.material_participant_daily_report.customer_name, yixintui_operate.material_participant_daily_report.customer_id, yixintui_operate.material_participant_daily_report.cost, yixintui_operate.material_participant_daily_report.impression, yixintui_operate.material_participant_daily_report.click, yixintui_operate.material_participant_daily_report.conversion, cast(yixintui_operate.material_participant_daily_report.video_play_count, decimal(41,0) BINARY)->Column#161, cast(yixintui_operate.material_participant_daily_report.video_outer_play_count, decimal(41,0) BINARY)->Column#162, cast(yixintui_operate.material_participant_daily_report.video_outer_play100_count, decimal(41,0) BINARY)->Column#163, yixintui_operate.material_participant_daily_report.material_type, yixintui_operate.material_participant_daily_report.material_name, yixintui_operate.material_participant_daily_report.spec_material_id, yixintui_operate.material_participant_daily_report.material_id, yixintui_operate.material_participant_daily_report.agent_material_id, yixintui_operate.material_participant_daily_report.user_id, yixintui_operate.material_participant_daily_report.platform_type, yixintui_operate.material_participant_daily_report.customer_id, yixintui_operate.material_participant_daily_report.industry_id, yixintui_operate.material_participant_daily_report.industry_name, yixintui_operate.material_participant_daily_report.video_play_count, yixintui_operate.material_participant_daily_report.video_outer_play100_count, yixintui_operate.material_participant_daily_report.video_avg_play_time, ifnull(cast(yixintui_operate.material_participant_daily_report.spec_material_id, var_string(20)), yixintui_operate.material_participant_daily_report.agent_material_id)->Column#177, yixintui_operate.material_participant_daily_report.industry_id
│ │ │ └─IndexLookUp_79 0.02 root
│ │ │ ├─Selection_77(Build) 0.02 cop[tikv] eq(yixintui_operate.material_participant_daily_report.material_type, 0)
│ │ │ │ └─IndexRangeScan_75 10.00 cop[tikv] table:Material_Participant_Daily_Report, partition:p20211214, index:uk_id(Report_Date, User_Id, Role_Id, Material_Type, Agent_Material_Id, Platform_Type, Account_Id) range:[“2021-12-14”,“2021-12-14”], keep order:false, stats:pseudo
│ │ │ └─Selection_78(Probe) 0.02 cop[tikv] gt(yixintui_operate.material_participant_daily_report.cost, 0), ne(yixintui_operate.material_participant_daily_report.industry_id, 0)
│ │ │ └─TableRowIDScan_76 0.02 cop[tikv] table:Material_Participant_Daily_Report, partition:p20211214 keep order:false, stats:pseudo
│ │ └─IndexLookUp_50(Probe) 1.71 root
│ │ ├─Selection_49(Build) 1.71 cop[tikv] not(isnull(mbg_business.creative_material_tag_info.spec_material_id))
│ │ │ └─IndexRangeScan_47 1.73 cop[tikv] table:cmti, index:index_smi(spec_material_id) range: decided by [eq(mbg_business.creative_material_tag_info.spec_material_id, yixintui_operate.material_participant_daily_report.spec_material_id)], keep order:false
│ │ └─TableRowIDScan_48(Probe) 1.71 cop[tikv] table:cmti keep order:false
│ └─TableReader_93(Probe) 138.00 root data:TableFullScan_92
│ └─TableFullScan_92 138.00 cop[tiflash] table:cmt keep order:false, stats:pseudo
└─TableReader_97(Probe) 4327.00 root data:TableFullScan_96
└─TableFullScan_96 4327.00 cop[tiflash] table:eus keep order:false
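
From the err stack in the log above, the panic unwinds out of PipelinedWindowExec (the RANK() window) through vecGroupChecker.splitIntoGroups into the outer worker of the IndexNestedLoopHashJoin: a nil chunk reaches Chunk.NumRows. A possible session-level mitigation while the bug is open, assuming the pipelined window executor is part of the trigger (an untested sketch; tidb_enable_pipelined_window_function is the documented switch, available since v5.1):

-- Untested workaround sketch: fall back to the non-pipelined window executor
-- for this session, then re-run the INSERT ... SELECT. Whether this avoids the
-- panic is an assumption drawn from the stack trace, not a confirmed fix.
SET @@session.tidb_enable_pipelined_window_function = 0;

If the statement then succeeds, that further narrows the bug to PipelinedWindowExec; restore the default afterwards with SET @@session.tidb_enable_pipelined_window_function = 1;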

After removing the following columns, the write succeeds (see the plan-comparison sketch after the working statement below).
temp.conversion_rate,
temp.ctr,
temp.video_play_count,
temp.video_outer_play_count,
temp.video_outer_play100_count,
temp.video_play_rate,
temp.video_play_over_rate,
temp.video_avg_play_time,

INSERT INTO Material_Rank_List_Daily_hem (
Platform_Type,
material_type,
Img_Path,
Material_Cover,
Material_Name,
Spec_Material_Id,
Material_Id,
Agent_Material_Id,
upload_User_Id,
upload_User_Name,
design_user,
Customer_Id,
Customer_Name,
Industry_Id,
Industry_Name,
material_tag,
totalCost,
cost,
impression,
click,
conversion,

`Modify_Time`,
`Create_Time`

) SELECT
temp.Platform_Type,
temp.Material_Type,
temp.Img_Path,
NULL,
temp.material_name,
temp.Spec_Material_Id,
temp.Material_Id,
temp.Agent_Material_Id,
temp.Upload_User_Id,
eus.display_name,
temp.design_user,
temp.Customer_Id,
temp.Customer_Name,
temp.Industry_Id,
temp.Industry_Name,
GROUP_CONCAT(
DISTINCT cmt.tag_name SEPARATOR '|'
) AS material_tag,
0,
temp.cost,
temp.impression,
temp.click,
temp.conversion,

NOW(),
NOW()

FROM
(
SELECT
t2.*
FROM
(
SELECT
*, RANK() OVER (
PARTITION BY Industry_Id
ORDER BY
cost DESC
) AS rk
FROM
(
SELECT
Platform_Type,
Material_Type,
Spec_Material_Id,
Material_Id,
agent_material_id,
material_name,
MAX(Img_Path) AS Img_Path,
GROUP_CONCAT(
DISTINCT Role_Name,
':',
User_Name
ORDER BY
Role_Name SEPARATOR ';'
) AS Design_User,
customer_id,
GROUP_CONCAT(
DISTINCT "(",
customer_id,
')',
customer_name
ORDER BY
customer_id SEPARATOR '、'
) AS customer_name,
Industry_Id,
industry_name,
User_Id AS Upload_User_Id,
SUM(cost) AS cost,
SUM(impression) AS impression,
SUM(click) AS click,
SUM(conversion) AS conversion,
IFNULL(
SUM(cost) / SUM(conversion),
0
) AS conversion_cost,
IFNULL(
SUM(conversion) / SUM(click),
0
) AS conversion_rate,
IFNULL(
SUM(click) / SUM(impression),
0
) AS ctr,
SUM(video_play_count) AS video_play_count,
SUM(video_outer_play_count) AS video_outer_play_count,
SUM(video_outer_play100_count) AS video_outer_play100_count,
IFNULL(
SUM(video_play_count) / SUM(impression),
0
) AS video_play_rate,
IFNULL(
video_outer_play100_count / video_play_count,
0
) AS video_play_over_rate,
video_avg_play_time
FROM
Material_Participant_Daily_Report
WHERE
Report_Date = '2021-12-14'
AND cost > 0
AND Industry_Id <> 0
AND material_type = 0
GROUP BY
IFNULL(
Spec_Material_Id,
Agent_Material_Id
),
Industry_Id
) t1
) t2
WHERE
rk <= 10
) temp
LEFT JOIN mbg_business.creative_material_tag_info cmti ON temp.Spec_Material_Id = cmti.Spec_Material_Id
LEFT JOIN mbg_business.creative_material_tag cmt ON cmti.material_tag_id = cmt.id
LEFT JOIN mbg_core.emarbox_user eus ON temp.Upload_User_Id = eus.user_id
GROUP BY
IFNULL(
Spec_Material_Id,
Agent_Material_Id
)
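
Most of the removed columns are the ratio expressions (conversion_rate, ctr, video_play_rate, video_play_over_rate) and the video counters feeding them, all of which flow from the aggregate up through the window executor. Since the bare SELECT reportedly completes while the INSERT ... SELECT panics, the optimizer may be choosing a different plan under INSERT. A comparison sketch; the <...> placeholders stand for the two full statements quoted above, nothing else is assumed:

-- Compare the executor trees of the failing and the working statement.
-- Assumption: the failing plan pairs the Window operator with an IndexHashJoin
-- (as in the SELECT plan posted above) while the reduced, working plan does not.
EXPLAIN <failing INSERT ... SELECT into Material_Rank_List_Daily>;
EXPLAIN <working INSERT ... SELECT into Material_Rank_List_Daily_hem>;

If the trees differ only around the window/join operators, the two plans plus the stack trace should be enough to file this against pingcap/tidb on GitHub.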

When I use tispark-assembly-2.4.2.jar, spark-sql fails to start with: Exception in thread "main" java.lang.NoSuchMethodError: scala.Function1.$init$(Lscala/Function1;)V. (This error usually indicates a Scala binary-version mismatch, i.e. a jar compiled against Scala 2.12 running on a Scala 2.11 runtime; the prebuilt Spark 2.4.3 used here ships with Scala 2.11.)

[root@yzdmp006044 bin]# ./spark-sql
21/12/14 16:06:07 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
21/12/14 16:06:08 INFO HiveMetaStore: 0: Opening raw store with implemenation class:org.apache.hadoop.hive.metastore.ObjectStore
21/12/14 16:06:08 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored
21/12/14 16:06:08 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored
21/12/14 16:06:11 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MFieldSchema" is tagged as "embedded-only" so does not have its own datastore table.
21/12/14 16:06:11 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MOrder" is tagged as "embedded-only" so does not have its own datastore table.
21/12/14 16:06:11 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MFieldSchema" is tagged as "embedded-only" so does not have its own datastore table.
21/12/14 16:06:11 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MOrder" is tagged as "embedded-only" so does not have its own datastore table.
21/12/14 16:06:11 INFO Query: Reading in results for query "org.datanucleus.store.rdbms.query.SQLQuery@0" since the connection used is closing
21/12/14 16:06:11 INFO MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
21/12/14 16:06:11 INFO HiveMetaStore: Added admin role in metastore
21/12/14 16:06:11 INFO HiveMetaStore: Added public role in metastore
21/12/14 16:06:12 INFO HiveMetaStore: No user is added in admin role, since config is empty
21/12/14 16:06:12 INFO HiveMetaStore: 0: get_all_databases
21/12/14 16:06:12 INFO audit: ugi=root ip=unknown-ip-addr cmd=get_all_databases
21/12/14 16:06:12 INFO HiveMetaStore: 0: get_functions: db=default pat=*
21/12/14 16:06:12 INFO audit: ugi=root ip=unknown-ip-addr cmd=get_functions: db=default pat=*
21/12/14 16:06:12 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MResourceUri" is tagged as "embedded-only" so does not have its own datastore table.
21/12/14 16:06:12 INFO SessionState: Created local directory: /tmp/402228b4-19b8-484b-b198-d504b82a7b67_resources
21/12/14 16:06:12 INFO SessionState: Created HDFS directory: /tmp/hive/root/402228b4-19b8-484b-b198-d504b82a7b67
21/12/14 16:06:12 INFO SessionState: Created local directory: /tmp/root/402228b4-19b8-484b-b198-d504b82a7b67
21/12/14 16:06:12 INFO SessionState: Created HDFS directory: /tmp/hive/root/402228b4-19b8-484b-b198-d504b82a7b67/_tmp_space.db
21/12/14 16:06:12 INFO SparkContext: Running Spark version 2.4.3
21/12/14 16:06:12 INFO SparkContext: Submitted application: SparkSQL::123.59.17.44
21/12/14 16:06:12 INFO SecurityManager: Changing view acls to: root
21/12/14 16:06:12 INFO SecurityManager: Changing modify acls to: root
21/12/14 16:06:12 INFO SecurityManager: Changing view acls groups to:
21/12/14 16:06:12 INFO SecurityManager: Changing modify acls groups to:
21/12/14 16:06:12 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set()
21/12/14 16:06:13 INFO Utils: Successfully started service 'sparkDriver' on port 30047.
21/12/14 16:06:13 INFO SparkEnv: Registering MapOutputTracker
21/12/14 16:06:13 INFO SparkEnv: Registering BlockManagerMaster
21/12/14 16:06:13 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
21/12/14 16:06:13 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
21/12/14 16:06:13 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-050b98a3-b303-403c-8b5b-134f924aff51
21/12/14 16:06:13 INFO MemoryStore: MemoryStore started with capacity 991.5 MB
21/12/14 16:06:13 INFO SparkEnv: Registering OutputCommitCoordinator
21/12/14 16:06:13 INFO Utils: Successfully started service 'SparkUI' on port 4040.
21/12/14 16:06:13 INFO SparkUI: Bound SparkUI to 123.59.17.44, and started at http://123.59.17.44:4040
21/12/14 16:06:13 INFO StandaloneAppClient$ClientEndpoint: Connecting to master spark://123.59.17.44:7077...
21/12/14 16:06:13 INFO TransportClientFactory: Successfully created connection to /123.59.17.44:7077 after 41 ms (0 ms spent in bootstraps)
21/12/14 16:06:13 INFO StandaloneSchedulerBackend: Connected to Spark cluster with app ID app-20211214160613-0003
21/12/14 16:06:13 INFO StandaloneAppClient$ClientEndpoint: Executor added: app-20211214160613-0003/0 on worker-20211214153716-123.59.17.46-7078 (123.59.17.46:7078) with 5 core(s)
21/12/14 16:06:13 INFO StandaloneSchedulerBackend: Granted executor ID app-20211214160613-0003/0 on hostPort 123.59.17.46:7078 with 5 core(s), 10.0 GB RAM
21/12/14 16:06:13 INFO StandaloneAppClient$ClientEndpoint: Executor added: app-20211214160613-0003/1 on worker-20211214153850-123.59.17.45-7078 (123.59.17.45:7078) with 5 core(s)
21/12/14 16:06:13 INFO StandaloneSchedulerBackend: Granted executor ID app-20211214160613-0003/1 on hostPort 123.59.17.45:7078 with 5 core(s), 10.0 GB RAM
21/12/14 16:06:13 INFO StandaloneAppClient$ClientEndpoint: Executor added: app-20211214160613-0003/2 on worker-20211214153403-123.59.17.44-7078 (123.59.17.44:7078) with 5 core(s)
21/12/14 16:06:13 INFO StandaloneSchedulerBackend: Granted executor ID app-20211214160613-0003/2 on hostPort 123.59.17.44:7078 with 5 core(s), 10.0 GB RAM
21/12/14 16:06:13 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 31079.
21/12/14 16:06:13 INFO NettyBlockTransferService: Server created on 123.59.17.44:31079
21/12/14 16:06:13 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
21/12/14 16:06:13 INFO StandaloneAppClient$ClientEndpoint: Executor updated: app-20211214160613-0003/1 is now RUNNING
21/12/14 16:06:13 INFO StandaloneAppClient$ClientEndpoint: Executor updated: app-20211214160613-0003/2 is now RUNNING
21/12/14 16:06:13 INFO StandaloneAppClient$ClientEndpoint: Executor updated: app-20211214160613-0003/0 is now RUNNING
21/12/14 16:06:14 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 123.59.17.44, 31079, None)
21/12/14 16:06:14 INFO BlockManagerMasterEndpoint: Registering block manager 123.59.17.44:31079 with 991.5 MB RAM, BlockManagerId(driver, 123.59.17.44, 31079, None)
21/12/14 16:06:14 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 123.59.17.44, 31079, None)
21/12/14 16:06:14 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 123.59.17.44, 31079, None)
21/12/14 16:06:14 INFO StandaloneSchedulerBackend: SchedulerBackend is ready for scheduling beginning after reached minRegisteredResourcesRatio: 0.0
Exception in thread "main" java.lang.NoSuchMethodError: scala.Function1.$init$(Lscala/Function1;)V
at org.apache.spark.sql.TiExtensions.<init>(TiExtensions.scala:23)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at java.lang.Class.newInstance(Class.java:442)
at org.apache.spark.sql.SparkSession$Builder.liftedTree1$1(SparkSession.scala:945)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:943)
at org.apache.spark.sql.hive.thriftserver.SparkSQLEnv$.init(SparkSQLEnv.scala:48)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.<init>(SparkSQLCLIDriver.scala:315)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:166)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
21/12/14 16:06:14 INFO SparkContext: Invoking stop() from shutdown hook
21/12/14 16:06:14 INFO SparkUI: Stopped Spark web UI at http://123.59.17.44:4040
21/12/14 16:06:14 INFO StandaloneSchedulerBackend: Shutting down all executors
21/12/14 16:06:14 INFO CoarseGrainedSchedulerBackend$DriverEndpoint: Asking each executor to shut down
21/12/14 16:06:14 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
21/12/14 16:06:14 INFO MemoryStore: MemoryStore cleared
21/12/14 16:06:14 INFO BlockManager: BlockManager stopped
21/12/14 16:06:14 INFO BlockManagerMaster: BlockManagerMaster stopped
21/12/14 16:06:14 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
21/12/14 16:06:14 INFO SparkContext: Successfully stopped SparkContext
21/12/14 16:06:14 INFO ShutdownHookManager: Shutdown hook called
21/12/14 16:06:14 INFO ShutdownHookManager: Deleting directory /tmp/spark-d4d3a693-7c24-4573-8ca0-e83cdadcc1c1
21/12/14 16:06:14 INFO ShutdownHookManager: Deleting directory /tmp/spark-1023cdac-0584-4dbe-85ce-986d7bc9a20f