
sql - Stored procedure keeps locking itself - Stack Overflow


We trigger a stored procedure that inserts into table1, but then we want to remove the extra rows so there are no duplicates. The problem is that when we try to remove these duplicates, it locks the table until it times out.

We perform the following insert:

INSERT INTO table1 (momt, message_log_id, sender, receiver, msgdata, smsc_id, sms_type, coding, dlr_mask, dlr_url, validity, boxc_id, carrier_id, destination)
    SELECT 'mt', @messageBatchKey, @sender, rtrim(ltrim(e.country_code))+e.sub_user_number, msgdata = 
    CASE
        WHEN s.lang = 'english' AND A.sms_limit = '1' THEN LEFT(@messageValuesms, 160)
        WHEN s.lang = 'english' AND A.sms_limit = '2' THEN LEFT(@messageValuesms, 360)
        WHEN s.lang = 'spanish' THEN (SELECT message from message_log_lang where message_log_id = @messageBatchKey and language = 'ES')
        /***WHEN s.lang = 'spanish' THEN LEFT(@messageValuesp, 160)***/
        WHEN s.lang = 'creole' THEN LEFT(@messageValuecr, 160)
        ELSE LEFT(@messageValuesms, 160)
    END
    , @smscID, '2', '0', '3', 
    @dlr_url + 'sub_id=' + CAST(s.sub_id AS varchar(50)) + '&carrierID=' + ltrim(rtrim(CAST(e.sub_carrier_id AS varchar(10)))), NULL,
    'inspironMT', c.carrier_id, 
    CASE
        WHEN @accountDestination <> 3 THEN @accountDestination
        ELSE 0
    END
    FROM 
        SUBSCRIPTION s 
    INNER JOIN 
        #distinctSubscriptionKeys d ON s.sub_id = d.distinctSubscriptionKey 
    INNER JOIN 
        PHONENUMBERS e ON s.sub_id = e.sub_id
    INNER JOIN 
        CARRIERS c ON e.sub_carrier_id = c.carrier_id
    INNER JOIN 
        ACCOUNTS A ON s.account_id = a.account_id
    WHERE 
        em_type = 't' 
        AND s.active = '1'
        AND a.sms_send_method = '0'
        AND s.send_email IN ('0', '2')
        AND c.active = 1
        AND (rtrim(ltrim(e.country_code)) = 1 OR rtrim(ltrim(e.country_code)) = '')
        /*AND c.carrier_id IN 
        (
        SELECT carrier_id
        FROM CarrierDestinationProperties
        WHERE send_method = 1
        AND (destination = @accountDestination OR @accountDestination = 3)
        )
        */  
        AND ((e.sub_carrier_id = '5' OR e.sub_carrier_id = '4')
             OR
             ((NOT 0 = (SELECT COUNT(*) FROM #subgroups)
               AND 0 = (SELECT COUNT(a.sub_group_id) 
                        FROM Wens.dbo.ACCOUNT_SUB_GROUPS AS a 
                        INNER JOIN #subgroups AS b ON a.sub_group_id = b.subgroupKey
                        WHERE (a.account_id = @accountKey) 
                          AND (a.active = '1') 
                          AND (a.sms_send_method = '1')))           
    ))
    ORDER BY 
        d.distinctSubscriptionKey

Then right after that we perform the following to remove the dups:

DELETE FROM table1
WHERE sql_id NOT IN (SELECT MIN(sql_id)
                     FROM table1
                     WHERE message_log_id = @messageBatchKey
                     GROUP BY receiver);

It's this second call that locks. And it only happens when we are dealing with 20K+ records. Anything below that works.

Is there a way to group by receiver within the select statement in the insert? I have been trying to find ways to figure this out and each road I go down locks.

Anyone that has a suggestion would be greatly appreciated.


asked Feb 17 at 20:44 by Scott · edited Feb 17 at 21:04 by marc_s
  • MySQL or SQL Server? They are completely different products... – Dale K Commented Feb 17 at 20:49
  • I suggest the insert isn't relevant to the delete... your question would be clearer and simpler if it just showed the delete. – Dale K Commented Feb 17 at 20:51
  • 6 Why insert and then remove duplicates? It's like taking sand to the beach. You can use SELECT TOP 1 per duplicate group to select only the relevant records in the first place (see the sketch after these comments). – siggemannen Commented Feb 17 at 21:05
  • 1 On a side note: rtrim(ltrim(e.country_code)) = 1 is going to fail in interesting ways, better to do rtrim(ltrim(e.country_code)) = '1'. Also those SELECT COUNT subqueries should be EXISTS subqueries. Also the ORDER BY is pointless in an INSERT unless you are inserting millions of rows and want to insert in clustered index order, or you need an IDENTITY column in the correct order (why?). – Charlieface Commented 2 days ago
  • 1 Slight detour... you said you are using SQL Server 2012. That has been fully unsupported for three years. It is long past time to sort out your upgrade path to a supported version of SQL Server. – Sean Lange Commented 2 days ago
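The suggestion from the comments can be sketched against the question's own query: number the rows per receiver inside the INSERT's SELECT and keep only the first, so duplicates are never written and the follow-up DELETE becomes unnecessary. This is only a sketch: the numbered CTE name and the ORDER BY s.sub_id tie-breaker are placeholders, the remaining joins, filters, and output columns from the original query are omitted for brevity, and it assumes the concatenated rtrim(ltrim(e.country_code)) + e.sub_user_number expression is exactly what lands in receiver.

;WITH numbered AS
(
    SELECT rtrim(ltrim(e.country_code)) + e.sub_user_number AS receiver,
           s.sub_id,
           -- one row per receiver survives; rn = 1 picks an arbitrary winner
           ROW_NUMBER() OVER (PARTITION BY rtrim(ltrim(e.country_code)) + e.sub_user_number
                              ORDER BY s.sub_id) AS rn
    FROM SUBSCRIPTION s
    INNER JOIN #distinctSubscriptionKeys d ON s.sub_id = d.distinctSubscriptionKey
    INNER JOIN PHONENUMBERS e ON s.sub_id = e.sub_id
    /* remaining joins and WHERE clause from the original query go here */
)
INSERT INTO table1 (receiver, message_log_id /* , remaining columns */)
SELECT receiver, @messageBatchKey /* , remaining columns */
FROM numbered
WHERE rn = 1;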

2 Answers


If the problem is that there are duplicate phone numbers, maybe this will help:

change

JOIN PHONENUMBERS e ON s.sub_id = e.sub_id

to

JOIN (SELECT sub_id, 
             MAX(comm_type) as comm_type,
             MAX(country_code) as country_code, 
             MAX(sub_carrier_id) as sub_carrier_id,
             MAX(sub_user_number) as sub_user_number 
      FROM PHONENUMBERS 
      GROUP BY sub_id) as e ON s.sub_id = e.sub_id

NOTE: This assumes that every row with the same sub_id also has the same values for all the other fields selected. IF IT DOES NOT, this will create hard-to-find bugs, since no criteria are used to pick which row's values survive. If you had a criterion (like a last-modified date), you could select the data based on it. That would look like this:

JOIN (SELECT sub_id, comm_type, country_code, sub_carrier_id, sub_user_number,
             ROW_NUMBER() OVER (PARTITION BY sub_id ORDER BY modified_date DESC) AS RN
      FROM PHONENUMBERS) as e ON s.sub_id = e.sub_id AND e.RN = 1

This is much more likely to give valid results, as it takes all fields from the same row. In the first version, using GROUP BY with MAX can mix values from different rows, since each MAX is computed independently over all rows with that sub_id.

I would suggest that the delete is causing issues.

Firstly, the delete statement itself may be doing much more than you expect.

DELETE FROM table1
WHERE sql_id NOT IN (SELECT MIN(sql_id)
                     FROM table1
                     WHERE message_log_id = @messageBatchKey
                     GROUP BY receiver);

Say you have 20k rows with mixed message_log_id values. The SELECT MIN(sql_id) subquery will only find rows with message_log_id = @messageBatchKey. The delete statement will therefore delete every row in table1 whose message_log_id differs from @messageBatchKey, regardless of whether it is a duplicate, because those rows can never appear in the subquery's result.

Therefore, at a minimum, I would suggest changing the above to explicitly filter the deleted rows to only those where message_log_id = @messageBatchKey.

DELETE FROM table1
WHERE message_log_id = @messageBatchKey
      AND sql_id NOT IN (SELECT MIN(sql_id)
                     FROM table1
                     WHERE message_log_id = @messageBatchKey
                     GROUP BY receiver);
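
As an aside, a common SQL Server alternative that avoids NOT IN altogether is to delete through a CTE. This is a sketch only, assuming sql_id is unique and the lowest sql_id per receiver is the row to keep:

;WITH dups AS
(
    SELECT sql_id,
           ROW_NUMBER() OVER (PARTITION BY receiver ORDER BY sql_id) AS rn
    FROM table1
    WHERE message_log_id = @messageBatchKey
)
DELETE FROM dups   -- deleting through the CTE removes the underlying table1 rows
WHERE rn > 1;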

Secondly, if you don't have a decent index on the table, then every time you run the delete statement it will need to read the whole table to identify the relevant rows - and then sort them.

I would suggest adding an index to make identification of the rows to delete easier:

CREATE NONCLUSTERED INDEX IX_table1_messagelogid ON table1
(message_log_id, receiver, sql_id)

This would help identification of rows in both parts of the statement - in the subquery (identifying the MIN(sql_id) values) and limiting the rows in table1 to delete via the message_log_id.
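
If the single big delete still escalates to a table lock at 20K+ rows, batching is another well-known mitigation: SQL Server escalates row locks to a table lock once a statement accumulates roughly 5,000 locks, so keeping each batch below that threshold leaves the table usable between batches. A sketch (the 4000 batch size is an illustrative choice, not a tested value):

DECLARE @rows int = 1;
WHILE @rows > 0
BEGIN
    -- delete at most 4000 duplicates per iteration; the MIN(sql_id) per
    -- receiver is never deleted, so the loop converges
    DELETE TOP (4000) FROM table1
    WHERE message_log_id = @messageBatchKey
      AND sql_id NOT IN (SELECT MIN(sql_id)
                         FROM table1
                         WHERE message_log_id = @messageBatchKey
                         GROUP BY receiver);

    SET @rows = @@ROWCOUNT;
END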
