Sunday, June 7, 2009

Who's blocking


-- A quick little standalone script that tells you which process is blocking and which processes the blocking process is actually blocking.

-- When running this script in Query Analyzer, change your output to "Results in Text" (CTRL-T).  It uses the blocking info from sp_who2 combined with DBCC INPUTBUFFER and a little cursor to wrap it all up.  The formatting needs a little work, but the info is all there.

set nocount on
declare @spid varchar(10)
declare @blkby varchar(10)
declare @stmt varchar(100)
-- create the holding table for sp_who2 output on first run, otherwise just clear it out
if object_id('tempdb..#temp') is null
begin
   create table #temp ( spid integer, status varchar(100), login varchar(50), hostname varchar(25), blkby varchar(10), 
                        dbname varchar(25), command varchar(100), cputime integer, diskio integer, lastbatch varchar(25), 
                        programname varchar(255), spid2 integer )   -- on SQL 2005 and later, sp_who2 also returns a REQUESTID column; add "requestid integer" here if the INSERT ... EXEC fails
end
else
begin
   truncate table #temp
end
insert into #temp
exec sp_who2

declare curs cursor for
-- sp_who2 puts ' . ' in the BlkBy column when a spid is not blocked, so this keeps only the blocked spids
select convert(varchar(10),spid), blkby from #temp where blkby not like '%.%'

open curs

fetch next from curs into @spid, @blkby
while @@fetch_status = 0
begin
   set @stmt = 'dbcc inputbuffer(' + @blkby + ')'
   raiserror('SPID:%s is Blocking with the following statement',0,1,@blkby) with nowait
   exec (@stmt)
   raiserror('SPID that is Blocked:%s',0,1,@spid) with nowait
   set @stmt = 'dbcc inputbuffer(' + convert(varchar(10), @spid) + ')'
   exec (@stmt)
   fetch next from curs into @spid, @blkby
end

close curs 

deallocate curs

Script out logins for migration from SQL Server 2000 to 2005

USE master 
GO 
IF OBJECT_ID ('sp_hexadecimal') IS NOT NULL 
DROP PROCEDURE sp_hexadecimal 
GO 
CREATE PROCEDURE sp_hexadecimal 
@binvalue varbinary(256), 
@hexvalue varchar(256) OUTPUT 
AS 
DECLARE @charvalue varchar(256) 
DECLARE @i int 
DECLARE @length int 
DECLARE @hexstring char(16) 
SELECT @charvalue = '0x' 
SELECT @i = 1 
SELECT @length = DATALENGTH (@binvalue) 
SELECT @hexstring = '0123456789ABCDEF' 
WHILE (@i <= @length) 
BEGIN 
DECLARE @tempint int 
DECLARE @firstint int 
DECLARE @secondint int 
SELECT @tempint = CONVERT(int, SUBSTRING(@binvalue,@i,1)) 
SELECT @firstint = FLOOR(@tempint/16) 
SELECT @secondint = @tempint - (@firstint*16) 
SELECT @charvalue = @charvalue + 
SUBSTRING(@hexstring, @firstint+1, 1) + 
SUBSTRING(@hexstring, @secondint+1, 1) 
SELECT @i = @i + 1 
END 
SELECT @hexvalue = @charvalue 
GO 

IF OBJECT_ID ('sp_help_revlogin_2000_to_2005') IS NOT NULL 
DROP PROCEDURE sp_help_revlogin_2000_to_2005 
GO 
CREATE PROCEDURE sp_help_revlogin_2000_to_2005 

@login_name sysname = NULL, 
@include_db bit = 0, 
@include_role bit = 0 

AS 
DECLARE @name sysname 
DECLARE @xstatus int 
DECLARE @binpwd varbinary (256) 
DECLARE @dfltdb varchar (256) 
DECLARE @txtpwd sysname 
DECLARE @tmpstr varchar (256) 
DECLARE @SID_varbinary varbinary(85) 
DECLARE @SID_string varchar(256) 

IF (@login_name IS NULL) 
DECLARE login_curs CURSOR STATIC FOR 
SELECT sid, [name], xstatus, password, isnull(db_name(dbid), 'master') 
FROM master.dbo.sysxlogins 
WHERE srvid IS NULL AND 
[name] <> 'sa' 
ELSE 
DECLARE login_curs CURSOR STATIC FOR -- STATIC so the FETCH FIRST calls below work for this branch too 
SELECT sid, [name], xstatus, password, isnull(db_name(dbid), 'master') 
FROM master.dbo.sysxlogins 
WHERE srvid IS NULL AND 
[name] = @login_name 

OPEN login_curs 

FETCH NEXT FROM login_curs INTO @SID_varbinary, @name, @xstatus, @binpwd, @dfltdb 

IF (@@fetch_status = -1) 
BEGIN 
PRINT 'No login(s) found.' 
CLOSE login_curs 
DEALLOCATE login_curs 
RETURN -1 
END 

SET @tmpstr = '/* sp_help_revlogin script ' 
PRINT @tmpstr 
SET @tmpstr = '** Generated ' 
+ CONVERT (varchar, GETDATE()) + ' on ' + @@SERVERNAME + ' */' 
PRINT @tmpstr 
PRINT '' 
PRINT '' 
PRINT '' 
PRINT '/***** CREATE LOGINS *****/' 

WHILE @@fetch_status = 0 
BEGIN 
PRINT '' 
SET @tmpstr = '-- Login: ' + @name 
PRINT @tmpstr 

IF (@xstatus & 4) = 4 
BEGIN -- NT authenticated account/group 
IF (@xstatus & 1) = 1 
BEGIN -- NT login is denied access 
SET @tmpstr = '' --'EXEC master..sp_denylogin ''' + @name + '''' 
PRINT @tmpstr 
END 
ELSE 
BEGIN -- NT login has access 
SET @tmpstr = 'IF NOT EXISTS (SELECT * FROM sys.server_principals WHERE [name] = ''' + @name + ''')' 
PRINT @tmpstr 
SET @tmpstr = CHAR(9) + 'CREATE LOGIN [' + @name + '] FROM WINDOWS' 
PRINT @tmpstr 
END 
END 
ELSE 
BEGIN -- SQL Server authentication 
EXEC sp_hexadecimal @SID_varbinary, @SID_string OUT 

IF (@binpwd IS NOT NULL) 
BEGIN -- Non-null password 
EXEC sp_hexadecimal @binpwd, @txtpwd OUT 
SET @tmpstr = 'CREATE LOGIN [' + @name + '] WITH PASSWORD=' + @txtpwd + ' HASHED' 
END 
ELSE 
BEGIN -- Null password 
SET @tmpstr = 'CREATE LOGIN [' + @name + '] WITH PASSWORD=''''' 
END 

SET @tmpstr = @tmpstr + ', CHECK_POLICY=OFF, SID=' + @SID_string 
PRINT @tmpstr 
END 

FETCH NEXT FROM login_curs INTO @SID_varbinary, @name, @xstatus, @binpwd, @dfltdb 
END 

IF @include_db = 1 
BEGIN 
PRINT '' 
PRINT '' 
PRINT '' 
PRINT '/***** SET DEFAULT DATABASES *****/' 

FETCH FIRST FROM login_curs INTO @SID_varbinary, @name, @xstatus, @binpwd, @dfltdb 

WHILE @@fetch_status = 0 
BEGIN 
PRINT '' 
SET @tmpstr = '-- Login: ' + @name 
PRINT @tmpstr 

SET @tmpstr = 'ALTER LOGIN [' + @name + '] WITH DEFAULT_DATABASE=[' + @dfltdb + ']' 
PRINT @tmpstr 

FETCH NEXT FROM login_curs INTO @SID_varbinary, @name, @xstatus, @binpwd, @dfltdb 
END 
END 

IF @include_role = 1 
BEGIN 
PRINT '' 
PRINT '' 
PRINT '' 
PRINT '/***** SET SERVER ROLES *****/' 

FETCH FIRST FROM login_curs INTO @SID_varbinary, @name, @xstatus, @binpwd, @dfltdb 

WHILE @@fetch_status = 0 
BEGIN 
PRINT '' 
SET @tmpstr = '-- Login: ' + @name 
PRINT @tmpstr 

IF @xstatus &16 = 16 -- sysadmin 
BEGIN 
SET @tmpstr = 'exec master.dbo.sp_addsrvrolemember @loginame=''' + @name + ''', @rolename=''sysadmin''' 
PRINT @tmpstr 
END 

IF @xstatus &32 = 32 -- securityadmin 
BEGIN 
SET @tmpstr = 'exec master.dbo.sp_addsrvrolemember @loginame=''' + @name + ''', @rolename=''securityadmin''' 
PRINT @tmpstr 
END 

IF @xstatus &64 = 64 -- serveradmin 
BEGIN 
SET @tmpstr = 'exec master.dbo.sp_addsrvrolemember @loginame=''' + @name + ''', @rolename=''serveradmin''' 
PRINT @tmpstr 
END 

IF @xstatus &128 = 128 -- setupadmin 
BEGIN 
SET @tmpstr = 'exec master.dbo.sp_addsrvrolemember @loginame=''' + @name + ''', @rolename=''setupadmin''' 
PRINT @tmpstr 
END 

IF @xstatus &256 = 256 --processadmin 
BEGIN 
SET @tmpstr = 'exec master.dbo.sp_addsrvrolemember @loginame=''' + @name + ''', @rolename=''processadmin''' 
PRINT @tmpstr 
END 

IF @xstatus &512 = 512 -- diskadmin 
BEGIN 
SET @tmpstr = 'exec master.dbo.sp_addsrvrolemember @loginame=''' + @name + ''', @rolename=''diskadmin''' 
PRINT @tmpstr 
END 

IF @xstatus &1024 = 1024 -- dbcreator 
BEGIN 
SET @tmpstr = 'exec master.dbo.sp_addsrvrolemember @loginame=''' + @name + ''', @rolename=''dbcreator''' 
PRINT @tmpstr 
END 

IF @xstatus &4096 = 4096 -- bulkadmin 
BEGIN 
SET @tmpstr = 'exec master.dbo.sp_addsrvrolemember @loginame=''' + @name + ''', @rolename=''bulkadmin''' 
PRINT @tmpstr 
END 

FETCH NEXT FROM login_curs INTO @SID_varbinary, @name, @xstatus, @binpwd, @dfltdb 
END 
END 

CLOSE login_curs 
DEALLOCATE login_curs 
RETURN 0 
GO

-- example: script out all logins, including default databases and server role memberships
exec sp_help_revlogin_2000_to_2005 @login_name=NULL, @include_db=1, @include_role=1
GO

Reading a SQL Profiler Trace file

In SQL Profiler you can open data from a file or a table with the File -> Open menu option. If your data is in multiple rolled-over files, SQL Profiler will ask whether you'd like to open the next file when it reaches the end of the current one. You can also replay your trace using the Replay menu, in which you can even set breakpoints.

Personally, I like to analyze my trace files with SQL. To do that, the data has to be in a trace table, so if you have a trace file you first need to import it into a table.

The easiest way to do that is with the built-in fn_trace_gettable table-valued function:

select *
from fn_trace_gettable('c:\MyTraceFile.trc', default)

The first parameter is the path to the trace file on the server's disk. The second parameter is the number of rolled-over files to be read; passing default reads all the rollover files. If you want to read just the named file out of many rolled-over files, use 1 as the parameter value.
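For example, to read only the file you name and skip any rollover files:

select *
from fn_trace_gettable('c:\MyTraceFile.trc', 1)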

Although you could use the function to read the trace file each time, it is better to import the trace into a table once and query the table itself, since reading the file each time is slow. A simple way to create the table is:

select IDENTITY(BIGINT, 1, 1) AS RowNumber, *
into MyTraceTable
from fn_trace_gettable('c:\MyTraceFile.trc', default)
where 1 = 0 -- just create the table without any data

After you have the table definition, you can index it to your liking and then simply use an insert to populate the table:

-- list the trace columns explicitly; RowNumber is an identity column, so leave it out of both lists
insert into MyTraceTable (<column list>)
select <column list>
from fn_trace_gettable('c:\MyTraceFile.trc', default)
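As a sketch of what you can then do (assuming the trace captured the Duration, CPU and TextData columns), you could index the table on Duration and pull out the slowest statements:

create index IX_MyTraceTable_Duration on MyTraceTable (Duration)

select top 10 RowNumber, Duration, CPU, TextData
from MyTraceTable
order by Duration desc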

Combining SQL Profiler Trace file and PerfMon log file

Start SQL Profiler and open a trace file in it. The SQL trace file and the PerfMon log file must be time synchronized, so either both have to be gathered on the same machine or the two machines have to have synchronized clocks. Wait until the SQL trace file is fully loaded, then go to File -> Import Performance Data...

Choose the PerfMon log file (.BLG, .CSV) that correlates with the SQL trace file. A dialog will open asking you to select the counters you'd like to display. Click OK and you should get a screen similar to the one in Picture 4 below.

Picture 4: Combine SQL Profiler Trace file and PerfMon counters log file

By clicking on the PerfMon graph you can see that the selected row in your trace changes. PerfMon counters have a minimum polling interval of 1 second, so you will usually get more than one SQL event per counter sample; the selected row in the trace is the one that best corresponds to the PerfMon counter value at that point in time.

 

Shrink a named transaction log file belonging to a database

/*
Shrink a named transaction log file belonging to a database

Originally found at;

http://support.microsoft.com/support/kb/articles/q256/6/50.asp

Changes:
28.08.2001
Modified the inner loop so it tested the dx time so long overruns did not happen
Modified the inner loop so it had a fixed minimum quantity so there was no skip in skip out

29.08.2001
Modified the inner loop so it had a dynamic minimum quantity to allow faster shrinkage

24.01.2002
Modified the USE statement so it uses brackets around the dbname
Modified the @TruncLog variable so it uses brackets around the dbname

31.05.2002
Modified the code to use PRINT instead of SELECT in several cases
Modified the code to use @MaxCount instead of two unclear rules 
Modified the code to use @Factor instead of several hard-coded values 
Commented the use of @Factor
Moved the configuration and @Counter init code to before the start of the first loop to avoid repetition
Modified the code to display the process runtime in seconds rather than minutes

*/

SET NOCOUNT ON

DECLARE @LogicalFileName SYSNAME,
        @MaxMinutes INT,
        @NewSize INT,
        @Factor FLOAT

/*
  The process has five control parameters.  Most of the time you only need to worry about the first four,
    as these are the big controls; the fifth (@Factor) is simply a fine-tuning control which rarely needs
    to come into play.
*/

--This is the name of the database for which the log will be shrunk.
USE [Speqs]

--Use sp_helpfile to identify the logical file name that you want to shrink.
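--  (for example, running EXEC sp_helpfile in the target database lists its logical and physical file names)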
SET @LogicalFileName = 'Speqs_Log';
--Limit on time allowed to wrap log in minutes
SET @MaxMinutes = 5;
--Ideal size of logfile in MB
SET @NewSize =100;

/*
  Factor determining the maximum number of pages to pad out, based on the original number of pages in use
    (a single page = 8K).  Values in the range 0.8 - 1.0 seem to work well for many databases.

  Increasing the number increases the maximum number of pages that may be padded, which forces more
    padding to be written before the process finishes.  This often speeds up shrinking very large
    log files that would otherwise run out of time.

  Decreasing the number decreases the maximum number of pages that may be padded, so less work is done.
    This often helps smaller log files shrink to their minimum size when larger values were actually
    expanding them.
*/
SET @Factor = 1.0;                        

/*
  All code after this point is driven by these parameters and will not require editing unless you need to 
    fix a bug in the padding/shrinking process itself.
*/

-- Setup / initialize
DECLARE @OriginalSize INT,
        @StringData VARCHAR(500)

SELECT @OriginalSize = size -- in 8K pages
FROM sysfiles
WHERE name = @LogicalFileName;

SELECT @StringData = 'Original Size of ' + db_name() + ' LOG is ' + 
    CONVERT(VARCHAR(30),@OriginalSize) + ' 8K pages or ' + 
    CONVERT(VARCHAR(30),(@OriginalSize*8/1024)) + 'MB'
FROM sysfiles
WHERE name = @LogicalFileName;

PRINT @StringData;
PRINT ''

--Drop the temporary table if it already exists
IF ( OBJECT_ID('[dbo].[DummyTrans]') IS NOT NULL )
  DROP TABLE [DummyTrans]

CREATE TABLE [DummyTrans]( [DummyColumn] CHAR(8000) NOT NULL );

-- Wrap log and truncate it.
DECLARE @Counter   INT,
        @MaxCount  INT,
        @StartTime DATETIME,
        @TruncLog  VARCHAR(500)

-- Try an initial shrink.
DBCC SHRINKFILE (@LogicalFileName, @NewSize)

SET @TruncLog = 'BACKUP LOG [' + db_name() + '] WITH TRUNCATE_ONLY';
EXEC (@TruncLog)

-- Configure limiter
IF @OriginalSize / @Factor > 50000
    SET @MaxCount = 50000
ELSE
    SET @MaxCount = @OriginalSize * @Factor

-- Attempt to shrink down the log file
PRINT 'Maximum padding : '+CAST( @MaxCount AS VARCHAR(10) )+' pages'
PRINT 'Maximum Time : '+CAST( @MaxMinutes AS VARCHAR(10) )+' minutes ('+CAST( @MaxMinutes*60 AS VARCHAR(10) )+' seconds)'
PRINT ''

SET @Counter = 0;
SET @StartTime = GETDATE();

--loop the padding code to reduce the log while
-- within time limit and 
-- log has not been shrunk enough
WHILE (
    (@MaxMinutes*60 > DATEDIFF(ss, @StartTime, GETDATE())) AND
    (@OriginalSize = (SELECT size FROM sysfiles WHERE name = @LogicalFileName)) AND
    ((@OriginalSize * 8 / 1024) > @NewSize)
)
BEGIN --Outer loop.

    --pad out the logfile a page at a time while
    -- number of pages padded does not exceed our maximum page padding limit
    -- within time limit and 
    -- log has not been shrunk enough
    WHILE (
        (@Counter < @MaxCount) AND 
        (@MaxMinutes*60 > DATEDIFF(ss, @StartTime, GETDATE())) AND
        (@OriginalSize = (SELECT size FROM sysfiles WHERE name = @LogicalFileName)) AND
        ((@OriginalSize * 8 / 1024) > @NewSize)
    )
    BEGIN --Inner loop
        
        INSERT INTO DummyTrans VALUES ('Fill Log')  -- Because it is a char field it inserts 8000 bytes.
        DELETE FROM DummyTrans
        SELECT @Counter = @Counter + 1

        --Every 1,000 cycles tell the user what is going on
        IF ROUND( @Counter , -3 ) = @Counter
        BEGIN
            PRINT 'Padded '+LTRIM( CAST( @Counter*8 AS VARCHAR(10) ) )+'K @ '+LTRIM( CAST( DATEDIFF( ss, @StartTime, GETDATE() ) AS VARCHAR(10) ) )+' seconds';
        END
    END

    --See if a trunc of the log shrinks it.
    EXEC( @TruncLog )

END
PRINT ''

SELECT @StringData = 'Final Size of ' + db_name() + ' LOG is ' +
   CONVERT(VARCHAR(30),size) + ' 8K pages or ' + 
   CONVERT(VARCHAR(30),(size*8/1024)) + 'MB'
FROM sysfiles 
WHERE name = @LogicalFileName;

PRINT @StringData
PRINT ''

DROP TABLE DummyTrans;
PRINT '*** Perform a full database backup ***'

SET NOCOUNT OFF
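
As a quick before-and-after check (not part of the original script), you can see how big each log file is and how much of it is actually in use with a standard DBCC command:

-- reports log file size and the percentage of log space in use for every database on the instance
DBCC SQLPERF(LOGSPACE)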