Sunday, January 25, 2015

Help needed in Performance Tuning

Hi Erland,


I understand your concern. Isprocessed is a NOT NULL column. I tried replacing the table variable with a temp table, and for 500k records it took 31 seconds. Here is my latest try:
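In case anyone wants to reproduce the 31-second figure, a minimal way to get a per-statement breakdown is to wrap the batch below like this (just a sketch, nothing specific to my tables):

SET STATISTICS TIME ON; -- CPU and elapsed time per statement
SET STATISTICS IO ON;   -- logical/physical reads per table

-- ... run the batch below here ...

SET STATISTICS TIME OFF;
SET STATISTICS IO OFF;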



DECLARE @CompID INT = 1050;

BEGIN
    -- Check whether any data is available to be processed.
    IF EXISTS (
            SELECT TOP 1 IDcompAFR
            FROM Comp_Detail
            WHERE CompID = @CompID
                AND COALESCE(processStatus, 0) = 0
            )
    BEGIN
        BEGIN TRY
            -- Set this so that if the first UPDATE fails, we won't even start the second UPDATE.
            -- It really says "if we're in a transaction and something fails, stop processing
            -- the transaction and do a rollback if we can".
            SET XACT_ABORT ON;

            -- Create tables to remember the rows we updated.
            IF OBJECT_ID('tempdb..#ActualData') IS NOT NULL
                DROP TABLE #ActualData;

            IF OBJECT_ID('tempdb..#AffectedRecords') IS NOT NULL
                DROP TABLE #AffectedRecords;

            CREATE TABLE #ActualData (
                UserId BIGINT
                ,IDcompAFR BIGINT
                ,ProcessingAmount MONEY
                );

            CREATE TABLE #AffectedRecords (UserId BIGINT);

            -- Previous attempt, kept for reference: table variables instead of temp tables.
            -- The first held the actual data, used to get the IDcompAFR values once the
            -- balances were updated; the second captured the affected UserIds.
            --DECLARE @ActualData TABLE (
            --    UserId BIGINT
            --    ,IDcompAFR BIGINT
            --    ,ProcessingAmount MONEY
            --    );
            --DECLARE @AffectedRecords TABLE (UserId BIGINT);

            BEGIN TRANSACTION;

            -- Get the whole set of data to be processed.
            INSERT INTO #ActualData (
                UserId
                ,IDcompAFR
                ,ProcessingAmount
                )
            SELECT UserId
                ,IDcompAFR
                ,ProcessingAmount = COALESCE(TransferAmount, 0)
            FROM Comp_Detail
            WHERE CompID = @CompID
                AND COALESCE(processStatus, 0) = 0;

            -- Aggregate the ProcessingAmount per UserId.
            WITH AggregateData
            AS (
                SELECT UserId
                    ,ProcessingAmount = SUM(COALESCE(ProcessingAmount, 0))
                FROM #ActualData
                GROUP BY UserId
                )
            -- Do the balance update and capture the UserIds that are affected.
            UPDATE UB
            SET UB.Amount_Available = COALESCE(UB.Amount_Available, 0) + AD.ProcessingAmount
                ,UB.Amount_Pend = COALESCE(UB.Amount_Pend, 0) - AD.ProcessingAmount
                ,LastModDt = GETDATE()
            OUTPUT deleted.UserId
            INTO #AffectedRecords (UserId)
            FROM User_bank UB
            INNER JOIN AggregateData AD ON UB.UserId = AD.UserId;
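            -- UserId itself is not modified by this UPDATE, so deleted.UserId and
            -- inserted.UserId are interchangeable for capturing the affected rows.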

            --===== Using the captured UserIds, get the IDcompAFR values from the #ActualData
            -- temp table and set processStatus = 1, meaning the OFR is processed for the trip.
            UPDATE Comp_Detail
            SET processStatus = 1
                ,AmtTransferDate = GETDATE()
            WHERE IDcompAFR IN (
                    SELECT DISTINCT AD.IDcompAFR
                    FROM #ActualData AD
                    INNER JOIN #AffectedRecords AR ON AD.UserId = AR.UserId
                    )
                -- Match the initial SELECT's predicate, so rows with a NULL processStatus
                -- (if any) are also flagged; a bare "processStatus = 0" would skip them.
                AND COALESCE(processStatus, 0) = 0;

            COMMIT TRANSACTION;
        END TRY
        BEGIN CATCH
            DECLARE @ErrorMessage NVARCHAR(4000);
            DECLARE @ErrorSeverity INT;
            DECLARE @ErrorState INT;

            SELECT @ErrorMessage = ERROR_MESSAGE()
                ,@ErrorSeverity = ERROR_SEVERITY()
                ,@ErrorState = ERROR_STATE();

            -- Only roll back if a transaction is actually open; the failure may have
            -- happened before BEGIN TRANSACTION.
            IF @@TRANCOUNT > 0
                ROLLBACK TRANSACTION;

            -- Clean up the temp tables only if they were created before the failure.
            IF OBJECT_ID('tempdb..#ActualData') IS NOT NULL
                DROP TABLE #ActualData;

            IF OBJECT_ID('tempdb..#AffectedRecords') IS NOT NULL
                DROP TABLE #AffectedRecords;

            RAISERROR (
                @ErrorMessage
                ,@ErrorSeverity
                ,@ErrorState
                );
        END CATCH;
    END
END
GO




--select * from Comp_Detail

--select * from User_bank
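A more targeted sanity check after a run, using the same names as above, might be:

-- Rows still pending for the company; should be 0 after a clean run.
SELECT COUNT(*) AS PendingRows
FROM Comp_Detail
WHERE CompID = 1050
    AND COALESCE(processStatus, 0) = 0;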



Can you please help me see whether the query can be sped up further? If you have any other way to tune this logic, I would appreciate it if you could post the modified code.
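For what it's worth, the next thing I was considering is a supporting index. The shape below is only a sketch based on the predicates above, and it assumes processStatus really is NOT NULL (in which case the COALESCE could also be dropped so the predicate becomes sargable):

-- Sketch only: meant to cover the EXISTS check and the INSERT...SELECT lookup.
CREATE NONCLUSTERED INDEX IX_Comp_Detail_CompID_processStatus
    ON Comp_Detail (CompID, processStatus)
    INCLUDE (UserId, IDcompAFR, TransferAmount);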


thanks




loving dotnet

