Below is the query I use to parse 3.5 million rows of JSON data and insert them into another table.
DECLARE @id_control bigint
DECLARE @batchSize bigint
DECLARE @results int
DECLARE @NextFetchValue bigint
DECLARE @InsertTableQuery nvarchar(max)
-- @Schemaname, @Tablename and @SQL (the OPENJSON column list) are
-- declared and populated earlier in the script (omitted here)
SET @results = 1
SET @batchSize = 20000
SET @id_control = 0
WHILE (@results > 0)
BEGIN
    -- upper bound of the current batch window
    SET @NextFetchValue = @id_control + @batchSize
    -- build the dynamic INSERT for the current RN range
    SET @InsertTableQuery = 'insert into '+@Schemaname+'.'+@Tablename+' (ColId, '+replace(@SQL,'nvarchar(max)','')+')
select top '+cast(@batchSize as nvarchar(256))+' a.RN, qse.* from(select qs.Query, QS.QId as RN from tools.QueryStore qs with(nolock)) a
cross apply openjson(a.Query)
with
(
'+@SQL+'
)qse
where RN > '+cast(@id_control as nvarchar(256))+' and RN <= '+cast(@NextFetchValue as nvarchar(256))
    EXEC sp_executesql @InsertTableQuery
    -- @@ROWCOUNT reflects the INSERT run by sp_executesql; 0 rows ends the loop
    SET @results = @@ROWCOUNT
    -- next batch
    SET @id_control = @id_control + @batchSize
END
This is the query the loop generates:
insert into dbo.Stgtable (ColId, Col1 , Col2 , Col3 , Col4 , Col5 , Col6 , Col7 , Col8 , Col9 , Col10 , Col11 , Col12 , Col14 , Col15 , Col16 , Col17 , Col21 , Col22 , Col23 , Col24 , Col27 , Col28 , Col29 , Col30 , Col31 , Col32 , Col33 , Col34 , Col35 , Col36 )
select top 20000 a.RN, qse.* from(select qs.Query, QS.QId as RN from tools.QueryStore qs with(nolock)) a
cross apply openjson(a.Query)
with
(
Col1 nvarchar(max), Col2 nvarchar(max), Col3 nvarchar(max), Col4 nvarchar(max), Col5 nvarchar(max), Col6 nvarchar(max), Col7 nvarchar(max), Col8 nvarchar(max), Col9 nvarchar(max), Col10 nvarchar(max), Col11 nvarchar(max), Col12 nvarchar(max), Col14 nvarchar(max), Col15 nvarchar(max), Col16 nvarchar(max), Col17 nvarchar(max), Col21 nvarchar(max), Col22 nvarchar(max), Col23 nvarchar(max), Col24 nvarchar(max), Col27 nvarchar(max), Col28 nvarchar(max), Col29 nvarchar(max), Col30 nvarchar(max), Col31 nvarchar(max), Col32 nvarchar(max), Col33 nvarchar(max), Col34 nvarchar(max), Col35 nvarchar(max), Col36 nvarchar(max)
)qse
where RN > 0 and RN <= 20000
While the batches are running, my transaction log grows dramatically.
How can I insert millions of records quickly without the transaction log ballooning?
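For context, one pattern that is often suggested for this kind of load: if the database can run in the SIMPLE recovery model during the load, issuing a CHECKPOINT after each committed batch lets SQL Server mark that batch's log records as reusable, so the log file stops growing between batches (under FULL recovery you would take frequent log backups instead). Below is a minimal sketch of that variation, reusing the variable names from the loop above; the CHECKPOINT and the recovery-model assumption are my additions, not part of the original script.

-- Sketch only: same batching loop, with per-batch log housekeeping.
-- Assumes the database is in SIMPLE recovery for the duration of the
-- load (an assumption, not something stated in the original post).
WHILE (@results > 0)
BEGIN
    -- ... build @InsertTableQuery exactly as in the loop above ...
    EXEC sp_executesql @InsertTableQuery
    SET @results = @@ROWCOUNT
    SET @id_control = @id_control + @batchSize

    -- Each batch auto-commits; CHECKPOINT then lets the inactive
    -- portion of the log be reused instead of growing the file.
    CHECKPOINT
END

If the target table is a heap (or its indexes can be dropped for the load), adding a WITH (TABLOCK) hint to the generated INSERT can additionally qualify it for minimal logging under SIMPLE or BULK_LOGGED recovery, reducing how much log each batch writes in the first place.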