SQL Server - 使用effectiveFrom和EffectiveTo日期更新数据 - Upsert优化

时间:2017-10-03 10:55:55

标签: sql-server temporal-tables

我们需要创建一个保持时间有效性的表(即对于给定的键——在本例中是下表中的 Md5——不允许存在重叠的时间段)。用户需要能够自行设置 EffectiveFrom 和 EffectiveTo 日期,因此系统版本控制的时态表(temporal tables)并不适用,因为它们似乎只允许使用系统生成的日期。用例是批量上传数据并设置有效日期范围,这些范围需要应用到现有数据上,以确保没有时间段重叠。

表定义:

-- Temporal data table: one validity period per row; periods for the same Md5
-- must never overlap (enforced by the upsert procedures, not by the DDL).
IF OBJECT_ID('dbo.IngestedData', 'U') IS NOT NULL
    DROP TABLE dbo.IngestedData;

CREATE TABLE IngestedData
(
    ID INT IDENTITY(1,1),
    Md5 VARCHAR(15) NOT NULL,        -- business key
    EffectiveFrom DATE NOT NULL,     -- period start (inclusive)
    EffectiveTo DATE NOT NULL,       -- period end (inclusive)
    UpdateUser VARCHAR(50),
    JsonData VARCHAR(MAX),
    -- Start must be strictly before end: zero-length periods are rejected.
    CONSTRAINT CK_IngestedData_Start_End CHECK (EffectiveFrom < EffectiveTo),
    -- NOTE(review): the name says "Start_End" but the constraint only covers
    -- (Md5, EffectiveFrom); name kept as-is so existing scripts stay greppable.
    CONSTRAINT UK_IngestedData_Md5_Start_End UNIQUE(Md5, EffectiveFrom),
    PRIMARY KEY (Id)
);

-- The UNIQUE constraint above already creates an index with Md5 as the leading
-- column, so a separate single-column index on Md5 would be redundant and has
-- been removed.
CREATE NONCLUSTERED INDEX AK_IngestedData_EffectiveFrom
   ON IngestedData (EffectiveFrom);
CREATE NONCLUSTERED INDEX AK_IngestedData_EffectiveTo
   ON IngestedData (EffectiveTo);

我编写了一个upsert过程,适用于单行更新,如下所示:

Upsert程序:

-- Single-row temporal upsert: inserts (@Md5, @EffectiveFrom..@EffectiveTo)
-- and reshapes/removes any existing periods for the same key so that no two
-- periods for one Md5 ever overlap.
CREATE PROCEDURE dbo.usp_UpsertIngestedDataRow 
    -- NOTE(review): @Md5 is VARCHAR(20) while the column is VARCHAR(15);
    -- kept for caller compatibility, but values longer than 15 chars will
    -- fail on insert. Likewise the date params are DATETIME against DATE
    -- columns, so any time-of-day component is silently truncated.
    @Md5 VARCHAR(20),
    @EffectiveFrom DateTime,
    @EffectiveTo DateTime,
    @UpdateUser VARCHAR(50),
    @JsonData VARCHAR(MAX)
AS
BEGIN
    SET NOCOUNT ON;
    -- Doom the transaction on any run-time error (including client timeouts)
    -- so the CATCH block can always roll back cleanly.
    SET XACT_ABORT ON;

    BEGIN TRY  -- fixed: removed stray ';' that followed BEGIN TRY
        BEGIN TRANSACTION;
        --Select the data that needs to be modified along with the action to be taken
        WITH NewRow(ID, Md5, EffectiveFrom, EffectiveTo, UpdateUser, JsonData, [Action]) AS
        (
            -- The incoming row; ID is NULL because it does not exist yet.
            SELECT NULL, @Md5, @EffectiveFrom, @EffectiveTo, @UpdateUser, @JsonData, 'I'
        ),
        OverlappingRows(ID, Md5, EffectiveFrom, EffectiveTo, UpdateUser, JsonData) AS
        (
            -- Existing periods for the same key that intersect the new period.
            SELECT 
                X.ID, X.Md5, X.EffectiveFrom, X.EffectiveTo, X.UpdateUser, X.JsonData 
            FROM 
                NewRow A 
            JOIN 
                IngestedData X ON (X.EffectiveFrom < A.EffectiveTo
                               AND X.EffectiveTo > A.EffectiveFrom)
                               AND A.Md5 = X.Md5
        ),
        NewStartRows(ID, Md5, EffectiveFrom, EffectiveTo, UpdateUser, JsonData, [Action]) AS
        (
            -- Left remainders: overlapping rows that start before the new
            -- period keep their start and end the day before it begins.
            SELECT 
                s.ID, s.Md5, s.EffectiveFrom, 
                (SELECT DATEADD(DAY, -1, MIN(EffectiveFrom)) 
                 FROM NewRow), 
                s.UpdateUser, s.JsonData, 'I'
            FROM
                OverlappingRows s
            WHERE 
                EffectiveFrom < (SELECT MIN(EffectiveFrom) FROM NewRow)
        ),
        NewEndRows(ID, Md5, EffectiveFrom, EffectiveTo, UpdateUser, JsonData, [Action]) AS
        (
            -- Right remainders: overlapping rows that end after the new
            -- period restart the day after it ends and keep their end.
            SELECT 
                s.ID, s.Md5, 
                (SELECT DATEADD(DAY, 1, MIN(EffectiveTo)) 
                 FROM NewRow), 
                s.EffectiveTo, s.UpdateUser, s.JsonData, 'I'
            FROM  
                OverlappingRows s
            WHERE 
                EffectiveTo > (SELECT MAX(EffectiveTo) FROM NewRow)
        ),
        DeleteRows(ID, Md5, EffectiveFrom, EffectiveTo, UpdateUser, JsonData, [Action]) AS
        (
            -- Rows completely covered by the new period are simply removed.
            SELECT 
                del.ID, del.Md5, del.EffectiveFrom, del.EffectiveTo, 
                del.UpdateUser, del.JsonData, 'D'
            FROM
                OverlappingRows del 
            INNER JOIN 
                NewRow n ON n.EffectiveFrom <= del.EffectiveFrom 
                         AND n.EffectiveTo >= del.EffectiveTo
        )
        SELECT *
        INTO #Temp
        FROM
            -- UNION ALL: the four sets are disjoint by construction (different
            -- IDs/Actions/date ranges), so no dedup sort is needed.
            (SELECT * FROM NewRow
             UNION ALL
             SELECT * FROM NewStartRows
             UNION ALL
             SELECT * FROM NewEndRows
             UNION ALL
             SELECT * FROM DeleteRows) AS Data;

        --Delete any rows that are being replaced or reshaped (the NewRow entry
        --has a NULL ID and is excluded explicitly for clarity).
        DELETE FROM IngestedData
        WHERE ID IN (SELECT ID FROM #Temp WHERE ID IS NOT NULL);

        --Insert the replacement: the new row plus the truncated remainders
        INSERT INTO IngestedData(Md5, EffectiveFrom, EffectiveTo, UpdateUser, JsonData) 
        SELECT Md5, EffectiveFrom, EffectiveTo, UpdateUser, JsonData
        FROM #Temp
        WHERE [Action] = 'I';

        COMMIT;
        -- #Temp is dropped automatically when the procedure ends; the explicit
        -- DROP TABLE that used to sit inside the transaction was unnecessary.
    END TRY
    BEGIN CATCH
        -- Only roll back when a transaction is actually open; an unconditional
        -- ROLLBACK raises error 3903 and masks the original error.
        IF @@TRANCOUNT > 0
            ROLLBACK;
        THROW;  -- re-raise the original error for the caller
    END CATCH
END
GO

即使表中填充了10,000,000条记录,单次调用的性能也很好,一次调用大约需要7毫秒。问题在于进行批量更新:通过游标对35,000条记录逐条执行上述存储过程大约需要5分钟。

我尝试重写该过程,使其接受一个表值参数,以便 DML 可以使用基于集合的操作,但我在逻辑处理上陷入了困境。希望有人能帮助把上述逻辑转换为遵循以下模式的基于集合的更新:

新存储过程:

CREATE PROCEDURE usp_BulkUpsertIngestedData 
    @UpdateUser VARCHAR(15), 
    @NewRows DataIngestionRecord READONLY
AS
BEGIN

类型定义

-- Table-valued parameter type for bulk ingestion: each row carries one
-- validity period (inclusive dates) and its JSON payload for a given Md5 key.
CREATE TYPE DataIngestionRecord AS TABLE
(
    Md5           VARCHAR(15) NOT NULL,  -- business key
    EffectiveFrom DATE        NOT NULL,  -- period start (inclusive)
    EffectiveTo   DATE        NOT NULL,  -- period end (inclusive)
    JsonData      VARCHAR(MAX)           -- payload for the period
)

1 个答案:

答案 0 :(得分:1)

在尝试禁用然后重建索引并删除过程中的CTE时,我发现在使用逐行更新时,性能根本没有得到改善。

我采取了另一种策略,并决定通过指定在任何给定的更新中每个唯一的Md5只能应用一个新的时间范围来限制upsert用例。这简化了将存储过程转换为基于集合的操作所需的逻辑(并符合我们的要求)。

我确实采纳了 @Tanner 的建议,从存储过程中移除了链式 CTE。最终的存储过程如下:

-- Set-based temporal upsert: applies one new validity period per Md5 from
-- @NewRows, deleting fully-covered existing periods and truncating/splitting
-- partially-overlapped ones so no two periods for a key overlap.
CREATE PROCEDURE dbo.usp_UpsertIngestedDataSet 
    @NewRows DataIngestionRecord READONLY,
    @UpdateUser VARCHAR(50)   -- widened from VARCHAR(15): column is VARCHAR(50)
AS
BEGIN

    SET NOCOUNT ON;
    SET XACT_ABORT ON;  -- any run-time error dooms (and rolls back) the transaction

    --Ensure that there are not multiple temporal regions in the update data
    --for a given key: the set-based logic below supports exactly one new
    --range per Md5.
    SELECT Md5
    INTO #Duplicates
    FROM @NewRows
    GROUP BY Md5
    HAVING COUNT(*) > 1;

    IF(@@ROWCOUNT > 0) BEGIN
        DECLARE @Err VARCHAR(MAX)
        SELECT @Err = COALESCE(@Err + CHAR(13), '') + Md5
        FROM #Duplicates
        ORDER BY Md5;

        SET @Err = 'The following Md5 values have multiple temporal ranges in the uploaded data which is not supported: ' + char(13) + @Err;

        THROW 50002, @Err, 1;
    END

    --Determine all overlapping rows from the existing data set
    SELECT id.ID, id.Md5, id.EffectiveFrom, id.EffectiveTo, id.UpdateUser, id.JsonData
    INTO #OverlappingRecords
    FROM IngestedData id
    JOIN @NewRows nr ON id.Md5 = nr.Md5
                    AND id.EffectiveFrom < nr.EffectiveTo
                    AND id.EffectiveTo > nr.EffectiveFrom;

    --Left overlap: existing row starts before the new range; truncate its end
    --to the day before the new range begins.
    SELECT ol.Id, ol.Md5, ol.EffectiveFrom,
           DATEADD(DAY, -1, nr.EffectiveFrom) AS EffectiveTo,
           'U' AS [Action]
    INTO #Changes
    FROM #OverlappingRecords ol
    JOIN @NewRows nr ON ol.Md5 = nr.Md5
                    AND ol.EffectiveFrom < nr.EffectiveFrom;

    --Right overlap: existing row ends after the new range; push its start to
    --the day after the new range ends.
    --Fixed: '>=' (was '>') so a row that starts on the SAME day as the new
    --range but ends later is truncated instead of surviving untouched and
    --violating UNIQUE(Md5, EffectiveFrom) on insert.
    INSERT INTO #Changes
    SELECT ol.ID, ol.Md5, DATEADD(DAY, 1, nr.EffectiveTo), ol.EffectiveTo, 'U'
    FROM #OverlappingRecords ol
    JOIN @NewRows nr ON ol.Md5 = nr.Md5
                    AND ol.EffectiveTo > nr.EffectiveTo
                    AND ol.EffectiveFrom >= nr.EffectiveFrom;

    --If a row overlaps both the left and right of a new region, the left part
    --is kept via the truncation above and the right part needs a new insert.
    SELECT ol.ID, ol.Md5,
           DATEADD(DAY, 1, nr.EffectiveTo) AS EffectiveFrom,
           ol.EffectiveTo, 'I' AS [Action]
    INTO #InsertRecords
    FROM #OverlappingRecords ol
    JOIN @NewRows nr ON ol.Md5 = nr.Md5
                    AND ol.EffectiveTo > nr.EffectiveTo
                    AND ol.EffectiveFrom < nr.EffectiveFrom;

    BEGIN TRY  -- added: an error mid-stream used to leave the transaction open
        BEGIN TRANSACTION;

        --Delete all overwritten regions (i.e. existing temporal ranges that
        --are completely replaced by a new range)
        DELETE FROM IngestedData
        WHERE ID IN (SELECT ol.ID
                     FROM #OverlappingRecords ol
                     JOIN @NewRows nr ON ol.Md5 = nr.Md5
                                     AND nr.EffectiveFrom <= ol.EffectiveFrom
                                     AND nr.EffectiveTo >= ol.EffectiveTo);

        --Truncate partially-overlapped rows BEFORE inserting, so a row whose
        --EffectiveFrom equals an incoming EffectiveFrom is moved out of the
        --way of the UNIQUE(Md5, EffectiveFrom) constraint first.
        UPDATE id
        SET EffectiveFrom = u.EffectiveFrom,
            EffectiveTo   = u.EffectiveTo
        FROM IngestedData id
        JOIN #Changes u ON id.ID = u.ID AND u.[Action] = 'U';

        --Insert new data: the uploaded rows plus the right-hand remainders of
        --split rows. UNION ALL is safe: split rows start at nr.EffectiveTo + 1
        --while uploaded rows start at nr.EffectiveFrom, so the sets are disjoint.
        INSERT INTO IngestedData (Md5, EffectiveFrom, EffectiveTo, UpdateUser, JsonData)
        SELECT Md5, EffectiveFrom, EffectiveTo, @UpdateUser, JsonData  -- fixed: was hard-coded 'user2'
        FROM @NewRows
        UNION ALL
        SELECT id.Md5, ir.EffectiveFrom, ir.EffectiveTo, id.UpdateUser, id.JsonData
        FROM IngestedData id
        JOIN #InsertRecords ir ON id.ID = ir.ID AND ir.[Action] = 'I';

        COMMIT;
    END TRY
    BEGIN CATCH
        -- Guard the rollback so a CATCH entered with no open transaction does
        -- not raise error 3903 and mask the original error.
        IF @@TRANCOUNT > 0
            ROLLBACK;
        THROW;
    END CATCH

END
GO

将此代码转换为基于集合的逻辑带来了显著的改进:此版本现在可以在7370毫秒内,对一个包含20,000,000条记录的表应用1,000,000行的更新。