----- List of Big Tables
USE master
GO
IF EXISTS (SELECT 1 FROM sysobjects WHERE name = 'sp_show_huge_tables' AND type = 'P')
DROP PROC sp_show_huge_tables
GO
CREATE PROC sp_show_huge_tables
(
@top int = NULL,
@include_system_tables bit = 0
)
AS
/*************************************************************************************************
Copyright © 1997 - 2002 Narayana Vyas Kondreddi. All rights reserved.
Purpose: To list the size of all tables in the database in descending order (that is, bigger tables first).
Output can be restricted to the top n tables. That is, you can pass the value 3 to the @top parameter to
see the top 3 biggest tables in your database. Optionally, you can use the @include_system_tables
parameter to include system tables in the output.
NOTE: Always create this procedure in the master database and call it from the required database,
as if the stored procedure were created in that database (that is, don't prefix the stored procedure
with the database name).
Written by: Narayana Vyas Kondreddi
http://vyaskn.tripod.com
Tested on: SQL Server 7.0
Date created: January-22-2002 21:37 GMT
Date modified: February-18-2002 11:31 GMT
Email: vyaskn@hotmail.com
Examples:
To list all the user tables in the database along with their sizes:
EXEC sp_show_huge_tables
To see the top three biggest tables in your database:
EXEC sp_show_huge_tables 3
To list all the user AND system tables in the database along with their sizes:
EXEC sp_show_huge_tables @include_system_tables = 1
To see the top three biggest user or system tables in your database:
EXEC sp_show_huge_tables 3, 1
*************************************************************************************************/
BEGIN
IF @top > 0
SET ROWCOUNT @top
SELECT [Table Name], (SELECT rows FROM sysindexes s WHERE s.indid < 2 AND s.id = OBJECT_ID(a.[Table Name])) AS [Row count], [Total space used (MB)] FROM
(
SELECT QUOTENAME(USER_NAME(o.uid)) + '.' + QUOTENAME(OBJECT_NAME(i.id)) AS [Table Name],
CONVERT(numeric(15,2),(((CONVERT(numeric(15,2),SUM(i.reserved)) * (SELECT low FROM master.dbo.spt_values (NOLOCK) WHERE number = 1 AND type = 'E')) / 1024.)/1024.)) AS [Total space used (MB)]
FROM sysindexes i (NOLOCK)
INNER JOIN
sysobjects o (NOLOCK)
ON
i.id = o.id AND
((@include_system_tables = 1 AND o.type IN ('U', 'S')) OR o.type = 'U') AND
((@include_system_tables = 1)OR (OBJECTPROPERTY(i.id, 'IsMSShipped') = 0))
WHERE indid IN (0, 1, 255)
GROUP BY QUOTENAME(USER_NAME(o.uid)) + '.' + QUOTENAME(OBJECT_NAME(i.id))
) as a
ORDER BY [Total space used (MB)] DESC
SET ROWCOUNT 0
END
GO
GRANT EXEC ON sp_show_huge_tables TO Public
/*
Usage:
To list all the user tables in the database along with their sizes:
EXEC sp_show_huge_tables
To see the top three biggest tables in your database:
EXEC sp_show_huge_tables 3
To list all the user AND system tables in the database along with their sizes:
EXEC sp_show_huge_tables @include_system_tables = 1
To see the top three biggest user or system tables in your database:
EXEC sp_show_huge_tables 3, 1
*/
Thursday, May 10, 2007
Nth Highest Number in a record set...
----- Return Nth highest Number in a set of data
----- Ever wondered how to find out the second highest salary from the employees table?
----- Or how to find out the third oldest employee in the company?
----- Here is a stored procedure which accepts the table name, column name, and nth number
----- and displays the nth highest number from the given column.
CREATE PROCEDURE dbo.NthHighestRecord
@TableName sysname
, @ColumnName sysname
, @NthHighestNumber int
AS
----- Written by: Narayana Vyas Kondreddi
----- Date written: December 23rd 2000
----- Purpose: To find out the nth highest number in a column. Eg: Second highest salary from the salaries table
----- Input parameters: Table name, Column name, and the nth position
----- Tested on: SQL Server Version 7.0
----- Email: answer_me@hotmail.com
----- URL: http://vyaskn.tripod.com/code/nth.txt
----- Modified on Thursday May 3, 2007 by Vishal Sinha to clean up code
SET NOCOUNT ON
BEGIN
----- Trim spaces in @TableName
SELECT
@TableName = LTRIM(RTRIM(@TableName))
, @ColumnName = LTRIM(RTRIM(@ColumnName))
----- DECLARE variables
DECLARE
@SQL varchar(1000)
----- Check if Table exists
IF (SELECT OBJECT_ID(@TableName, 'U')) IS NULL
BEGIN
RAISERROR('Invalid Table Name', 18, 1)
RETURN -1
END
IF NOT EXISTS(SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = @TableName AND COLUMN_NAME = @ColumnName)
BEGIN
RAISERROR('Invalid Column Name', 18, 1)
RETURN -1
END
IF @NthHighestNumber <= 0
BEGIN
RAISERROR('Nth Highest Number should be greater than Zero', 18, 1)
RETURN -1
END
----- Create SQL Statement
SELECT
@SQL = '
SELECT
MAX(' + @ColumnName + ')
FROM
' + @TableName + '
WHERE
' + @ColumnName + ' NOT IN (
SELECT TOP
' + LTRIM(STR(@NthHighestNumber - 1)) + ' ' + @ColumnName + '
FROM
' + @TableName + '
ORDER BY
' + @ColumnName + ' DESC )'
----- EXECUTE SQL Statement
EXEC (@SQL)
END
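A quick usage sketch (the table and column names below are placeholders for illustration, not from the original post): find the second highest value in a Salary column of an Employees table.
GO
----- Hypothetical usage example for dbo.NthHighestRecord
EXEC dbo.NthHighestRecord
@TableName = 'Employees'
, @ColumnName = 'Salary'
, @NthHighestNumber = 2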
The next version of SQL Server is now official...
Code name: Katmai.
Here is the official release story:
http://www.microsoft.com/sql/prodinfo/futureversion/default.mspx
Tuesday, May 1, 2007
List of Database Users by Role
----- This gives a list of all users and their role/group membership in the current database
sp_helpuser
----- The sysUsers table contains the list of all users (and roles) in the database
SELECT * FROM DatabaseName..sysUsers
----- This query will return list of Users by Role
USE DatabaseName
GO
SELECT
SR.Name AS RoleName
, SU.Name AS UserName
FROM
sysUsers AS SU
INNER JOIN sysUsers AS SR ON SU.GID = SR.UID
WHERE
SU.GID <> SU.UID ----- Specifies that the record is not a group but a user
-- AND SU.Name <> 'dbo' ----- If you want to leave out System Users
-- AND SU.Name <> 'Guest' ----- If you want to leave out Guest Accounts
ORDER BY
RoleName
, UserName
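If you only need the members of one specific role, the built-in sp_helprolemember procedure is an alternative to the query above (the role name below is just an example):
----- Members of a single role
EXEC sp_helprolemember 'db_datareader'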
Tuesday, April 24, 2007
SHRINK Database Files with Backup
----- SHRINK Database Files with Backup
----- DECLARE variables
DECLARE
@FileDate varchar(12)
, @DataBackupPath varchar(100)
, @LogBackupPath varchar(100)
, @DatabaseName sysname
----- SET values for variables
SELECT
@FileDate = '200303111800' --Format is yyyymmddhhnn (year, month, day, hour/24, minute)
, @DataBackupPath = 'X:\DataBackupFolder\'
, @LogBackupPath = 'H:\MSSQL\LogBackupFolder\'
, @DatabaseName = DB_NAME()
----- ISSUE CHECKPOINT
CHECKPOINT
----- Get Initial file size
SELECT
(Size * 8)/1024 AS InitialFileSizeMB
, Name AS FileName
FROM
sysFiles
----- Backup Files
EXEC ('BACKUP DATABASE ' + @DatabaseName + ' TO DISK = ''' + @DataBackupPath + @DatabaseName + '_db_' + @FileDate + '.BAK''')
EXEC ('BACKUP LOG ' + @DatabaseName + ' TO DISK = ''' + @LogBackupPath + @DatabaseName + '_tlog_' + @FileDate + '.TRN''')
----- ISSUE CHECKPOINT
CHECKPOINT
----- Shrink Files
----- You may have to modify the SHRINKFILE commands if you use a different naming convention for your logical names.
----- See the names from the sysfiles query above.
EXEC ('DBCC SHRINKFILE (' + @DatabaseName + '_Log, 5)')
EXEC ('BACKUP LOG ' + @DatabaseName + ' TO DISK = ''' + @LogBackupPath + @DatabaseName + '_tlog_' + @FileDate + '.TRN''')
EXEC ('DBCC SHRINKFILE (' + @DatabaseName + '_Log, 5)')
EXEC ('DBCC SHRINKFILE (' + @DatabaseName + '_Data, 5)')
----- Get Final file sizes
SELECT
(Size * 8)/1024 AS NewFileSizeMB
, Name AS FileName
FROM
sysFiles
Thursday, April 19, 2007
Backup and Restore Database
----- 1: Backup Database:
BACKUP DATABASE DBName
TO DISK = '\\FilePath\BackupFileName.BAK'
GO
----- 2: Restore Database to existing database with files at same location as backup
RESTORE DATABASE DBName
FROM DISK = 'X:\FilePath\BackupFileName.BAK'
----- 3: Restore Database to new database and move files to new location
RESTORE DATABASE DBName
FROM DISK = 'X:\FilePath\BackupFileName.BAK'
WITH
MOVE 'DataFileLogicalName' TO 'X:\NewPath\DataFileName.MDF'
, MOVE 'LogFileLogicalName' TO 'X:\NewPath\LogFileName.LDF'
----- 4: Restore LiteSpeed database backup
EXEC master.dbo.xp_restore_database
@database = 'DBName'
, @filename = 'X:\FilePath\BackupFileName.BAK'
, @WITH = 'WITH MOVE "DataFileLogicalName" TO "X:\NewPath\DataFileName.MDF"'
, @WITH = 'MOVE "LogFileLogicalName" TO "X:\NewPath\LogFileName.LDF"'
, @EncryptionKey = 'EncryptionKeyValue'
** Use WITH MOVE above if needed.
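Before restoring, it can help to confirm the backup file is readable and to look up the logical file names needed for the MOVE clauses above; a small sketch using the same placeholder path:
----- Verify a native backup file and list the logical file names it contains
RESTORE VERIFYONLY FROM DISK = 'X:\FilePath\BackupFileName.BAK'
RESTORE FILELISTONLY FROM DISK = 'X:\FilePath\BackupFileName.BAK'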
Attach and Detach Databases
----- 1: ATTACH DATABASES
EXEC sp_attach_db @dbname = N'DBName'
, @filename1 = N'X:\LocalfilePath\DataFileName.MDF'
, @filename2 = N'X:\LocalfilePath\LogFileName.LDF'
----- 2: RESTORE DATABASES
RESTORE DATABASE DBName WITH RECOVERY
----- 3: DETACH DATABASES
EXEC sp_detach_db 'DBName', 'TRUE'
To generate a Detach statement for all user databases on a particular SQL instance:
SET QUOTED_IDENTIFIER OFF
SELECT
"EXEC sp_detach_db '" + Name + "', " +"'TRUE'"
FROM
master..sysdatabases
WHERE
DBID > 4
SET QUOTED_IDENTIFIER ON
Then execute each generated detach statement in Query Analyzer.
SELECT MIN or MAX from multiple UNION Select Statements
My colleague was trying to select the minimum value from the output of multiple SELECT statements combined with UNION. This MIN value had to be returned as a column within another, more complex SELECT statement.
I don't know if I explained that well, but run the following code and you will understand :)
----- SELECT MIN or MAX from multiple UNION Select Statements
SELECT
ColumnList
, (SELECT
MIN(A)
FROM
(SELECT 4 AS A
UNION
SELECT 2 AS A) AS B
) AS AliasName
FROM
.....
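Stripped of the outer placeholders, the inner derived-table query runs on its own; a paste-ready version (the literal values 4 and 2 are just for illustration):
SELECT
(SELECT
MIN(A)
FROM
(SELECT 4 AS A
UNION
SELECT 2 AS A) AS B
) AS AliasName
----- Returns 2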
Monday, March 5, 2007
Two Digit Month and Date
You may need this when exporting data to a character-based file format, etc.
SELECT
RIGHT('00' + RTRIM(MONTH(GETDATE())), 2) AS DataMonth
, RIGHT('00' + RTRIM(day(GETDATE())), 2) AS DataDate
Add Data File or Move Data File for TempDB
TempDB works slightly differently than other user databases when it comes to adding or moving data files. There are occasions when a DBA may need to add new data files to TempDB or move its data files around.
Some reasons to add data files: running out of space in the current data files, adding new processors to the database server, or moving to a server with more physical processors.
General rule: number of data files for TempDB = number of physical processors on the server.
This rule holds for servers with up to and including 8 physical processors.
All files must be the same size for SQL Server to use them intelligently.
Here are the steps needed to achieve that.
----- Move Data File for TempDB
USE master
GO
ALTER DATABASE tempdb
MODIFY FILE (NAME = tempdev, FILENAME = 'X:\tempdb.mdf') -- New Location
GO
ALTER DATABASE tempdb
MODIFY FILE (NAME = templog, FILENAME = 'X:\templog.ldf') -- New Location
GO
-- After this stop and restart sql server.
----- Add New Data File to TempDB
ALTER DATABASE tempdb
ADD FILE (NAME = TempDB3
, FILENAME = 'X:\tempdb3.mdf'
, SIZE = 4000) ----- Size should be same as other Data Files in MB.
GO
-- After this stop and restart sql server.
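Before adding files, it can help to check the current TempDB layout so the new file size matches the existing ones; a small sketch using sysFiles, as in the shrink script earlier on this blog:
----- Check current TempDB data files and sizes
USE tempdb
GO
SELECT
Name AS LogicalFileName
, (Size * 8)/1024 AS FileSizeMB
FROM
sysFiles
GO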
DBCC Commands for SQL DBA
This is a work-in-progress document and will be updated whenever I have more to add to it :)
A good set of DBCC commands that a DBA should use regularly...
1: DBCC CHECKDB
-- Checks the allocation, structural, and logical integrity of all the objects in the specified database.
-- Check the current database.
DBCC CHECKDB
GO
-- Check the database without nonclustered indexes.
DBCC CHECKDB ('DatabaseName', NOINDEX);
GO
2: DBCC CHECKTABLE
-- Checks the integrity of all the pages and structures that make up the table or indexed view
-- DBCC CHECKTABLE performs a physical consistency check on a single table or indexed view and
-- all its nonclustered and XML indexes, unless the NOINDEX option is specified.
-- For the specified table, DBCC CHECKTABLE checks for the following:
-- Index, in-row, LOB, and row-overflow data pages are correctly linked.
-- Indexes are in their correct sort order.
-- Pointers are consistent.
-- The data on each page is reasonable, including computed columns.
-- Page offsets are reasonable.
-- Every row in the base table has a matching row in each nonclustered index, and vice-versa.
-- Every row in a partitioned table or index is in the correct partition.
-- DBCC CHECKTABLE uses an internal database snapshot to provide the transactional consistency that it must have to perform these checks.
-- If DBCC CHECKTABLE reports any errors, we recommend restoring the database from the database backup instead of running REPAIR with
-- one of the REPAIR options. If no backup exists, running REPAIR can correct the errors that are reported.
-- The REPAIR option to use is specified at the end of the list of reported errors.
-- However, correcting the errors by using the REPAIR_ALLOW_DATA_LOSS option might require that some pages, and therefore data, be deleted.
-- The repair can be performed under a user transaction to allow the user to roll back the changes that have been made.
-- If repairs are rolled back, the database will still contain errors and must be restored from a backup. After you have completed all repairs, back up the database.
-- To perform DBCC CHECKTABLE on every table in the database, use DBCC CHECKDB.
USE DatabaseName
GO
DBCC CHECKTABLE ('Owner.TableName')
GO
-- The following example performs a low overhead check of the Employee table in the AdventureWorks database.
USE DatabaseName
GO
DBCC CHECKTABLE ('Owner.TableName') WITH PHYSICAL_ONLY;
GO
3: DBCC DBREINDEX
-- DBCC DBREINDEX enables DBAs to rebuild indexes without having to drop and recreate PRIMARY KEY and UNIQUE constraints
-- Locks the table for the duration of the operation
-- Can offer more optimizations than a series of individual DROP INDEX and CREATE INDEX statements on a single table
USE DatabaseName
GO
DBCC DBREINDEX ('Owner.TableName', IndexName, FillFactorNumber)
GO
4: DBCC CHECKCATALOG
-- Checks for catalog consistency within the specified database. The database must be online.
-- DBCC CHECKCATALOG will check that every table and view in sysobjects has at least one column
-- in syscolumns and that every data type in syscolumns has a matching entry in systypes.
-- Check the current database.
DBCC CHECKCATALOG
GO
-- Check a particular database.
DBCC CHECKCATALOG ('DatabaseName');
GO
5: DBCC INPUTBUFFER
-- Displays the last statement sent from a client to an instance of SQL Server
DBCC INPUTBUFFER (SPID)
6: DBCC CHECKALLOC
-- Checks the consistency of disk space allocation structures for a specified database.
-- DBCC CHECKALLOC checks the allocation of all pages in the database, regardless of the type of page or type of object to
-- which they belong. It also validates the various internal structures that are used to keep track of these pages
-- and the relationships between them.
-- Check the current database.
DBCC CHECKALLOC
GO
-- Check a particular database.
DBCC CHECKALLOC ('DatabaseName')
GO
-- Other Options available with DBCC CHECKALLOC:
a: REPAIR_ALLOW_DATA_LOSS
Tries to repair any errors that are found. These repairs can cause some data loss. REPAIR_ALLOW_DATA_LOSS is the only option that allows allocation errors to be repaired.
b: REPAIR_FAST
Maintained for backward compatibility only.
Use the REPAIR options only as a last resort. Repair operations do not consider any of the constraints that may exist on or between tables.
If the specified table is involved in one or more constraints, run DBCC CHECKCONSTRAINTS after a repair operation.
If you must use REPAIR, run DBCC CHECKDB to find the repair level to use.
If you are going to use the REPAIR_ALLOW_DATA_LOSS level, back up the database before you run DBCC CHECKDB.
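If you do have to run a repair (a last resort, as noted above), the database must be in single-user mode first; a hedged sketch with a placeholder database name:
----- Repair allocation errors (may lose data; back up first)
ALTER DATABASE DatabaseName SET SINGLE_USER WITH ROLLBACK IMMEDIATE
GO
DBCC CHECKALLOC ('DatabaseName', REPAIR_ALLOW_DATA_LOSS)
GO
ALTER DATABASE DatabaseName SET MULTI_USER
GO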
7: DBCC SHOWCONTIG
-- This command shows the degree of contiguous values in a clustered index. If the metrics are poor, then you can drop and recreate the clustered index.
-- You may use either table name and index name, or table ID and index ID numbers.
USE DatabaseName
GO
DBCC SHOWCONTIG (TableName, IndexName)
GO
Results look like this:
/* --- -- */
DBCC SHOWCONTIG scanning 'client' table...
Table: 'client' (767927149); index ID: 1, database ID: 33
TABLE level scan performed.
- Pages Scanned................................: 3333
- Extents Scanned..............................: 436
- Extent Switches..............................: 498
- Avg. Pages per Extent........................: 7.6
- Scan Density [Best Count:Actual Count].......: 83.57% [417:499]
- Logical Scan Fragmentation ..................: 1.29%
- Extent Scan Fragmentation ...................: 40.37%
- Avg. Bytes Free per Page.....................: 681.2
- Avg. Page Density (full).....................: 91.58%
DBCC execution completed. If DBCC printed error messages, contact your system administrator.
/* --- -- */
8: DBCC SHRINKDATABASE
-- Shrinks the size of the data files in the specified database.
-- To shrink all data and log files for a specific database, execute DBCC SHRINKDATABASE.
-- To shrink one data or log file at a time for a specific database, execute DBCC SHRINKFILE.
-- DBCC SHRINKDATABASE and DBCC SHRINKFILE operations can be stopped at any point in the process, and any completed work is retained.
DBCC SHRINKDATABASE (DatabaseName, FreeSpacePercentNumber)
GO
9: DBCC SHRINKFILE
-- Shrinks the size of the specified data file or log file for the related database.
-- The database cannot be made smaller than the size of the model database.
-- Use DBCC SHRINKFILE to shrink a file to a size that is less than the size specified when it was created. This resets the minimum file size to the new value.
-- To remove any data that may be in a file, execute DBCC SHRINKFILE('file_name', EMPTYFILE) before executing ALTER DATABASE.
USE DatabaseName
GO
DBCC SHRINKFILE (LogicalFileName, NewFileSizeInMB)
GO
10: DBCC SHOW_STATISTICS
-- Displays the current distribution statistics for the specified target on the specified table.
-- The results returned indicate the selectivity of an index (the lower the density returned, the higher the selectivity)
-- and provide the basis for determining whether an index is useful to the query optimizer.
-- The results returned are based on distribution steps of the index.
-- Result set explained:
-- Updated: The date and time the index statistics were last updated
-- Rows: The total number of rows in the table
-- Rows Sampled: The number of rows sampled for index statistics information
-- Steps: The number of distribution steps
-- Density: The selectivity of the first index column prefix
-- Average key length: The average length of the first index column prefix
-- All density: The selectivity of a set of index column prefixes
-- Average length: The average length of a set of index column prefixes
-- Columns: The names of index column prefixes for which All density and Average length are displayed
-- RANGE_HI_KEY: The upper bound value of a histogram step
-- RANGE_ROWS: The number of rows from the sample that fall within a histogram step, not counting the upper bound
-- EQ_ROWS: The number of rows from the sample that are equal in value to the upper bound of the histogram step
-- DISTINCT_RANGE_ROWS: The number of distinct values within a histogram step, not counting the upper bound
-- AVG_RANGE_ROWS: The average number of duplicate values within a histogram step, not counting the upper bound (where RANGE_ROWS / DISTINCT_RANGE_ROWS for DISTINCT_RANGE_ROWS > 0)
USE DatabaseName
GO
DBCC SHOW_STATISTICS ('Owner.TableName', IndexName)
GO
11: DBCC SQLPERF
-- Provides statistics about how the transaction-log space was used in all databases.
DBCC SQLPERF(LOGSPACE)
GO
12: DBCC PROCCACHE
-- Displays information in a table format about the procedure cache.
-- The procedure cache is used to cache the compiled and executable plans to speed up the execution of batches.
-- The entries in a procedure cache are at a batch level. The procedure cache includes the following entries:
-- Compiled plans
-- Execution plans
-- Algebrizer tree
-- Extended procedures
-- Result Set Explained:
-- num proc buffs = Total number of pages used by all entries in the procedure cache.
-- num proc buffs used = Total number of pages used by all entries that are currently being used.
-- num proc buffs active = For backward compatibility only. Total number of pages used by all entries that are currently being used.
-- proc cache size = Total number of entries in the procedure cache.
-- proc cache used = Total number of entries that are currently being used.
-- proc cache active = For backward compatibility only. Total number of entries that are currently being used.
DBCC PROCCACHE
GO
Tuesday, February 13, 2007
Pattern match for data in column
Sometimes I need to make sure that data in a column does not contain any letters or numbers. I use the following code to match patterns in text...
----- Pattern match
----- PATINDEX returns first position where match happens
SELECT PATINDEX( '%abc%', '123456abc123')
SELECT PATINDEX( '%abc%', ColumnName)
----- To check if the text or column contains any letters
SELECT PATINDEX( '%[A-Z]%', '123456q')
----- To check if the text or column contains any number
SELECT PATINDEX( '%[0-9]%', 'abcde1fg')
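The same pattern works in a WHERE clause to filter rows; a hypothetical example (dbo.SomeTable and ColumnName are placeholder names):
----- Find rows where the column contains at least one digit
SELECT *
FROM dbo.SomeTable
WHERE PATINDEX('%[0-9]%', ColumnName) > 0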
Execute Some SQL Statement on All databases
Many times I need to run the same SQL statement (like DBCC statements or sp_updatestats) on all databases on the server. Instead of having to write the code manually for each database, I use this script that does the work for me...
** Please change the value of @SQL to the desired SQL code.
----- EXECUTE SQL Statement on all Databases
DECLARE
@DatabaseName nvarchar(100)
, @SQL nvarchar(500)
DECLARE DBNameCursor CURSOR FOR
SELECT
[Name] AS DatabaseName
FROM
master.dbo.sysdatabases
ORDER BY
DatabaseName
OPEN DBNameCursor
FETCH NEXT FROM DBNameCursor
INTO @DatabaseName
WHILE @@FETCH_STATUS = 0
BEGIN
----- SQL Statement to be run in each database (change @SQL to the desired command)
SELECT
@SQL = N'USE ' + QUOTENAME(@DatabaseName) + N'; EXEC dbo.sp_updatestats;'
----- Run the statement in the context of the current cursor database
EXEC sp_executeSQL @SQL
FETCH NEXT FROM DBNameCursor
INTO @DatabaseName
END
CLOSE DBNameCursor
DEALLOCATE DBNameCursor
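An alternative to the cursor is the undocumented (but widely used) sp_MSforeachdb procedure, which substitutes each database name for the ? placeholder; a minimal sketch:
----- Same idea without a cursor (undocumented procedure; behavior may change between versions)
EXEC sp_MSforeachdb 'USE [?]; EXEC dbo.sp_updatestats'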
SQL Jobs Failed in the Past 24 hours
----- SQL Jobs Failed in the Past 24 hours.
----- This will create a SQL Job that will send out an email with a list of Jobs that have failed over the past 24 hours.
/*
PLEASE SET VALUES FOR THE FOLLOWING PARAMETERS:
@SMTPServerIP = ''SMTP-SERVER-IP-ADDRESS''
@FromEmailAddress= ''FROM-EMAIL-ADDRESS''
@FromName = ''FROM-NAME''
@EmailSubject = ''Jobs failed on SQL Server'' - Change this to what you want it to be.
@ToEmailAddress = ''TO-EMAIL-ADDRESS'' - Semi-colon separated string
@AttachmentFile = ''C:\jobs.txt'' - Make sure this path exists
*/
USE msdb
GO
BEGIN TRANSACTION
DECLARE
@JobID BINARY(16)
, @ReturnCode INT
SELECT
@ReturnCode = 0
IF (SELECT COUNT(*) FROM msdb.dbo.syscategories WHERE name = N'[Uncategorized (Local)]') < 1
EXECUTE msdb.dbo.sp_add_category @name = N'[Uncategorized (Local)]'
-- Delete the job with the same name (if it exists)
SELECT
@JobID = job_id
FROM
msdb.dbo.sysjobs
WHERE
(name = N'JobsFailedToday')
IF (@JobID IS NOT NULL)
BEGIN
-- Check if the job is a multi-server job
IF (EXISTS (SELECT * FROM msdb.dbo.sysjobservers WHERE (job_id = @JobID) AND (server_id <> 0)))
BEGIN
-- There is, so abort the script
RAISERROR (N'Unable to import job ''JobsFailedToday'' since there is already a multi-server job with this name.', 16, 1)
GOTO QuitWithRollback
END
ELSE
-- Delete the [local] job
EXECUTE msdb.dbo.sp_delete_job @job_name = N'JobsFailedToday'
SELECT
@JobID = NULL
END
BEGIN
-- Add the job
EXECUTE @ReturnCode = msdb.dbo.sp_add_job
@job_id = @JobID OUTPUT
, @job_name = N'JobsFailedToday'
, @owner_login_name = N'sa'
, @description = N'List of all SQL Jobs that failed today.'
, @category_name = N'[Uncategorized (Local)]'
, @enabled = 1
, @notify_level_email = 0
, @notify_level_page = 0
, @notify_level_netsend = 0
, @notify_level_eventlog = 2
, @delete_level= 0
IF (@@ERROR <> 0 OR @ReturnCode <> 0)
GOTO QuitWithRollback
-- Add the job steps
EXECUTE @ReturnCode = msdb.dbo.sp_add_jobstep
@job_id = @JobID
, @step_id = 1
, @step_name = N'Jobs Failed Today'
, @command = N'-- ALL SQL Code
DECLARE
@Month varchar(2)
, @LDate varchar(15)
SELECT
@Month = CONVERT(varchar, month(GETDATE()))
IF LEN(@Month) < 2
BEGIN
SELECT
@LDate = CONVERT(varchar, YEAR(GETDATE())) + ''0'' + CONVERT(varchar, month(GETDATE())) + CONVERT(varchar, day(GETDATE()))
END
ELSE
BEGIN
SELECT
@LDate = CONVERT(varchar, YEAR(GETDATE())) + CONVERT(varchar, month(GETDATE())) + CONVERT(varchar, day(GETDATE()))
END
----- Send Email from SQL Server without using Outlook
----- Step 1: DECLARE variables
DECLARE
@SMTPServerIP varchar(16)
, @FromEmailAddress varchar(100)
, @FromName varchar(100)
, @ToEmailAddress varchar(300)
, @SQL varchar(8000)
, @SQLQuery varchar(2000)
, @EmailSubject varchar(1000)
, @AttachmentFile varchar(200)
----- Step 2: SET value for variables
SELECT
@SMTPServerIP = ''SMTP-SERVER-IP-ADDRESS''
, @FromEmailAddress= ''FROM-EMAIL-ADDRESS''
, @FromName = ''FROM-NAME''
, @EmailSubject = ''Jobs failed on SQL Server''
, @ToEmailAddress = ''TO-EMAIL-ADDRESS'' ----- Semi-colon separated string
, @AttachmentFile = ''C:\jobs.txt'' ----- Local Path to SQL Server
----- Step 3: Define query to be run for Email message if needed
SELECT
@SQLQuery = ''SELECT Name AS JobName FROM msdb.dbo.sysjobs A, msdb.dbo.sysjobservers B '' +
'' WHERE A.job_id = B.job_id AND B.last_run_outcome = 0 AND last_run_date = '' + CONVERT(varchar, @LDate)
----- Step 4: Define final SQL Statement
SELECT
@SQL = ''osql -E -w 500 -Q "'' + @SQLQuery + ''" -o "C:\Jobs.txt"''
----- Step 5: EXECUTE SQL Statement
EXEC master.dbo.xp_cmdshell @SQL
----- Step 6: Send File only if there is some data in the file
----- Step 6:a: Create temporary table that will hold details for xp_getfiledetails results
CREATE TABLE #FileDetails (
AlternateName varchar(33)
, Size int
, CreationDate int
, CreationTime int
, LastWrittenDate int
, LastWrittenTime int
, LastAccessedDate int
, LastAccessedTime int
, Attributes int
)
----- Step 6:b: INSERT data in temporary table
INSERT INTO #FileDetails
EXEC master.dbo.xp_getfiledetails ''C:\jobs.txt''
IF (SELECT Size FROM #FileDetails) > 0
BEGIN
----- Step 6:c: Send Email with File
EXEC master.dbo.xp_SMTP_SendMail
@From = @FromEmailAddress
, @From_Name = @FromName
, @To = @ToEmailAddress
, @Subject = @EmailSubject
, @MessageFile = @AttachmentFile
, @Server = @SMTPServerIP
END
ELSE
BEGIN
----- Step 6:d: Send Email with File
EXEC master.dbo.xp_SMTP_SendMail
@From = @FromEmailAddress
, @From_Name = @FromName
, @To = @ToEmailAddress
, @Subject = @EmailSubject
, @Message = ''No Jobs Failed Today''
, @Server = @SMTPServerIP
END
----- Step 6:e: DROP temporary table
DROP TABLE #FileDetails
GO '
, @database_name = N'master'
, @server = N''
, @database_user_name = N''
, @subsystem = N'TSQL'
, @cmdexec_success_code = 0
, @flags = 0
, @retry_attempts = 0
, @retry_interval = 1
, @output_file_name = N''
, @on_success_step_id = 0
, @on_success_action = 1
, @on_fail_step_id = 0
, @on_fail_action = 2
IF (@@ERROR <> 0 OR @ReturnCode <> 0)
GOTO QuitWithRollback
EXECUTE @ReturnCode = msdb.dbo.sp_update_job
@job_id = @JobID
, @start_step_id = 1
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
-- Add the job schedules
EXECUTE @ReturnCode = msdb.dbo.sp_add_jobschedule
@job_id = @JobID
, @name = N'Jobs Failed Today'
, @enabled = 1
, @freq_type = 8
, @active_start_date = 20060101
, @active_start_time = 74500
, @freq_interval = 127
, @freq_subday_type = 1
, @freq_subday_interval = 0
, @freq_relative_interval = 0
, @freq_recurrence_factor = 1
, @active_end_date = 99991231
, @active_end_time = 235959
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
-- Add the Target Servers
EXECUTE @ReturnCode = msdb.dbo.sp_add_jobserver
@job_id = @JobID
, @server_name = N'(local)'
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
END
COMMIT TRANSACTION
GOTO EndSave
QuitWithRollback:
IF (@@TRANCOUNT > 0) ROLLBACK TRANSACTION
EndSave:
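To spot-check the same information by hand, the query inside the job step can be run directly; a small sketch that builds today's last_run_date value with CONVERT instead of string concatenation:
----- Jobs whose last run failed today
SELECT
J.Name AS JobName
, JS.last_run_date
, JS.last_run_time
FROM
msdb.dbo.sysjobs AS J
INNER JOIN msdb.dbo.sysjobservers AS JS ON J.job_id = JS.job_id
WHERE
JS.last_run_outcome = 0
AND JS.last_run_date = CONVERT(int, CONVERT(varchar(8), GETDATE(), 112))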
Tuesday, February 6, 2007
Column List for Table
I know you can use sp_help TableName for this but sometimes you just want a list with only column names.
SELECT
C.Ordinal_Position
, C.Column_Name
FROM
Information_Schema.Columns AS C
WHERE
C.Table_Name = 'TableName'
ORDER BY
C.Ordinal_Position
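If you also want each column's data type, the same Information_Schema view carries it; a small extension of the query above:
SELECT
C.Ordinal_Position
, C.Column_Name
, C.Data_Type
, C.Character_Maximum_Length
FROM
Information_Schema.Columns AS C
WHERE
C.Table_Name = 'TableName'
ORDER BY
C.Ordinal_Position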
Number of Work Days in a Date Range.
--===== Change current database to the Master database.
-- Allows function to be shared by everyone
USE MASTER
GO
--===== If the function already exists, drop it
IF EXISTS (
SELECT *
FROM dbo.SYSOBJECTS
WHERE ID = OBJECT_ID(N'[dbo].[fnWorkDays]')
AND XType IN (N'FN', N'IF', N'TF')
)
DROP FUNCTION [dbo].[fnWorkDays]
GO
CREATE FUNCTION dbo.fnWorkDays
/***************************************************************************************
Purpose:
1. Given any valid start date and end date, this function will calculate and return
the number of workdays (Mon - Fri).
2. Given only a valid start date (end date has DEFAULT in it), this function will
return a 1 if the start date is a weekday and a 0 if not a weekday.
Usage:
1. MASTER.dbo.fnWorkDays(@StartDate,@EndDate)
2. MASTER.dbo.fnWorkDays(@StartDate,DEFAULT) --Always returns 1 or 0
3. MASTER.dbo.fnWorkDays(@EndDate,@StartDate)
4. MASTER.dbo.fnWorkDays(@StartDate,@StartDate) --Always returns 1 or 0
5. MASTER.dbo.fnWorkDays(@EndDate,@EndDate) --Always returns 1 or 0
Notes:
1. Holidays are NOT considered.
2. Because of the way SQL Server calculates weeks and named days of the week, no
special consideration for the value of DATEFIRST is given. In other words, it
doesn't matter what DATEFIRST is set to for this function.
3. If the input dates are in the incorrect order, they will be reversed prior to any
calculations.
4. Only whole days are considered. Times are NOT used.
5. The number of workdays INCLUDES both dates
6. Inputs may be literal representations of dates, datetime datatypes, numbers that
represent the number of days since 1/1/1900 00:00:00.000, or anything else that can
be implicitly converted to or already is a datetime datatype.
7. Undocumented: The DATEDIFF(wk,startdate,enddate) used below does not actually count weeks... It counts
transitions to a Sunday regardless of the DATEFIRST setting. In essence, it counts
only whole weekends in any given date range.
8. This UDF does NOT create a tally table or sequence table to operate. Not only is
it set based, it is truly "tableless".
Error Indications:
1. If either the @StartDate or the @EndDate parameter is an invalid date, the
following error is returned...
"Server: Msg 242, Level 16, State 3, Line 3
The conversion of a char data type to a datetime data type resulted in an
out-of-range datetime value."
2. If either the @StartDate or the @EndDate parameter is a string not resembling a
date, the following error is returned...
"Server: Msg 241, Level 16, State 1, Line 3
Syntax error converting datetime from character string."
3. If only one parameter is passed, the following error is returned...
"Server: Msg 313, Level 16, State 2, Line 3
An insufficient number of arguments were supplied for the procedure or
function MASTER.dbo.fnWorkDays."
Revisions:
Rev 00 - 12/12/2004 - Jeff Moden - Initial creation and test.
Rev 01 - 12/12/2004 - Jeff Moden - Load test, cleanup, document, release.
Rev 02 - 12/26/2004 - Jeff Moden - Return NULL if @StartDate is NULL or DEFAULT and
modify to be insensitive to DATEFIRST settings.
***************************************************************************************/
--======================================================================================
-- Presets
--======================================================================================
--===== Define the input parameters (ok if reversed by mistake)
(
@StartDate DATETIME,
@EndDate DATETIME = NULL --@EndDate replaced by @StartDate when DEFAULTed
)
--===== Define the output data type
RETURNS INT
AS
--======================================================================================
-- Calculate the RETURN of the function
--======================================================================================
BEGIN
--===== Declare local variables
--Temporarily holds @EndDate during date reversal
DECLARE @Swap DATETIME
--===== If the Start Date is null, return a NULL and exit
IF @StartDate IS NULL
RETURN NULL
--===== If the End Date is null, populate with Start Date value
-- so will have two dates (required by DATEDIFF below)
IF @EndDate IS NULL
SELECT @EndDate = @StartDate
--===== Strip the time element from both dates (just to be safe) by converting
-- to whole days and back to a date. Usually faster than CONVERT.
-- 0 is a date (01/01/1900 00:00:00.000)
SELECT @StartDate = DATEADD(dd,DATEDIFF(dd,0,@StartDate),0),
@EndDate = DATEADD(dd,DATEDIFF(dd,0,@EndDate) ,0)
--===== If the inputs are in the wrong order, reverse them
IF @StartDate > @EndDate
SELECT @Swap = @EndDate,
@EndDate = @StartDate,
@StartDate = @Swap
--===== Calculate and return the number of workdays using the
-- input parameters. This is the meat of the function.
-- This is really just one formula with a couple of parts
-- that are listed on separate lines for documentation
-- purposes.
RETURN (
SELECT
--Start with total number of days including weekends
(DATEDIFF(dd,@StartDate,@EndDate)+1)
--Subtract 2 days for each full weekend
-(DATEDIFF(wk,@StartDate,@EndDate)*2)
--If StartDate is a Sunday, Subtract 1
-(CASE WHEN DATENAME(dw,@StartDate) = 'Sunday'
THEN 1
ELSE 0
END)
--If EndDate is a Saturday, Subtract 1
-(CASE WHEN DATENAME(dw,@EndDate) = 'Saturday'
THEN 1
ELSE 0
END)
)
END
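A quick sanity check of the function (dates chosen for illustration): January 1-7, 2007 runs Monday through Sunday, so the call below returns 5.
GO
----- Example usage
SELECT MASTER.dbo.fnWorkDays('20070101', '20070107') AS WorkDays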
Person's Age in Years
This simple script calculates a person's age in years:
DECLARE
@DateOfBirth datetime
, @Today datetime
SELECT
@DateOfBirth = '6/24/1973'
, @Today = GETDATE()
----- Approximate: works from raw day counts, so it ignores leap days
SELECT
((((YEAR(@Today) * 365) + DATEPART(dy, @Today))
- ((YEAR(@DateOfBirth) * 365) + DATEPART(dy, @DateOfBirth)))
/ 365) AS AgeInYears
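Because the formula divides a raw day count by 365, it can be off by one around birthdays when leap days intervene. A sketch of an exact alternative (same variables as above): count the year boundaries crossed, then subtract one if this year's birthday has not yet arrived.
DECLARE
@DateOfBirth datetime
, @Today datetime
SELECT
@DateOfBirth = '6/24/1973'
, @Today = GETDATE()
SELECT
DATEDIFF(yy, @DateOfBirth, @Today)
- CASE WHEN DATEADD(yy, DATEDIFF(yy, @DateOfBirth, @Today), @DateOfBirth) > @Today
THEN 1
ELSE 0
END AS AgeInYears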
Monday, January 22, 2007
Assign Rank to SELECT List
SELECT
Rank = (
SELECT
COUNT(DISTINCT T2.ColumnName)
FROM
dbo.TableName AS T2
WHERE
[Some Where Clause]
AND T2.ColumnName <= T.ColumnName) ----- REQUIRED!
, [Column List]
FROM
dbo.TableName AS T
WHERE
[Some Where Clause]
ORDER BY
Rank
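As a concrete illustration of the pattern, the query below produces a dense ranking of salaries in a hypothetical dbo.Employees table (the lowest salary receives rank 1; equal salaries share a rank):
SELECT
Rank = (
SELECT
COUNT(DISTINCT E2.Salary)
FROM
dbo.Employees AS E2
WHERE
E2.Salary <= E.Salary)
, E.EmployeeName
, E.Salary
FROM
dbo.Employees AS E
ORDER BY
Rank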
Assign Value to Variable Using Dynamic SQL
DECLARE
@WhereClauseColumnName varchar(10)
, @MerchCount varchar(10) ----- row count used in the dynamic TOP clause
, @ParmDefinition nvarchar(100)
, @End nvarchar(100) ----- receives the value assigned to @C by sp_executesql
, @SQL nvarchar(4000)
----- SET values for @WhereClauseColumnName and @MerchCount
SELECT
@WhereClauseColumnName = 'SomeData'
, @MerchCount = '10'
SELECT
@ParmDefinition = '@C nvarchar(100) OUTPUT'
----- Note the doubled quotes: they wrap the concatenated value in quotes inside the dynamic string
SET @SQL = '
SELECT TOP 1
@C = T.ColumnName
FROM
dbo.TableName AS T
WHERE
T.ColumnName NOT IN (
SELECT TOP ' + @MerchCount + '
BW2.ColumnName
FROM
dbo.TableName AS BW2
WHERE
BW2.WhereClauseColumnName = ''' + @WhereClauseColumnName + '''
ORDER BY
BW2.ColumnName)
AND T.WhereClauseColumnName = ''' + @WhereClauseColumnName + '''
ORDER BY
T.ColumnName'
EXECUTE sp_executesql @SQL, @ParmDefinition, @End OUTPUT
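The essential technique is the OUTPUT parameter of sp_executesql: the outer variable (@End above) is bound to the parameter declared in @ParmDefinition (@C) and receives whatever the dynamic statement assigns to it. A minimal, self-contained sketch of the same idea (dbo.TableName is a placeholder):
DECLARE
@RowCount int
, @SQL nvarchar(200)
SET @SQL = N'SELECT @Cnt = COUNT(*) FROM dbo.TableName'
----- @RowCount is bound to the dynamic parameter @Cnt and receives its value
EXEC sp_executesql @SQL, N'@Cnt int OUTPUT', @Cnt = @RowCount OUTPUT
SELECT @RowCount AS [RowCount]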
Add AutoNumber To SELECT Statement
If you ever need to return a row number along with a recordset, a correlated COUNT will do it (a self-contained sketch follows the template):
SELECT
(SELECT
COUNT(A.ColumnName) + 1
FROM
dbo.TableName AS A
WHERE
A.ColumnName < B.ColumnName) AS RowNumber
, B.ColumnName
FROM
dbo.TableName AS B
ORDER BY
B.ColumnName
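A self-contained sketch with a temporary table (all names are illustrative) shows the numbering; note that the ordering column must be unique, or tied rows will receive the same number:
CREATE TABLE #Fruit (FruitName varchar(20))
INSERT INTO #Fruit VALUES ('Apple')
INSERT INTO #Fruit VALUES ('Banana')
INSERT INTO #Fruit VALUES ('Cherry')
SELECT
(SELECT
COUNT(A.FruitName) + 1
FROM
#Fruit AS A
WHERE
A.FruitName < B.FruitName) AS RowNumber
, B.FruitName
FROM
#Fruit AS B
ORDER BY
B.FruitName ----- returns 1 Apple, 2 Banana, 3 Cherry
DROP TABLE #Fruit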
Add Counter (sequential Identity) Data in table
If you ever need to populate a column with sequential, identity-style data in an existing table, this SQL code will do the trick (a runnable sketch with a temporary table follows the template):
1: The table should have an int column to hold the counter.
2: Populate the column:
DECLARE
@intCounter int
SELECT
@intCounter = 0
UPDATE T
SET
@intCounter = T.Counter = @intCounter + 1
FROM
dbo.table1 AS T
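A runnable sketch with a temporary table (illustrative names) shows the double assignment at work; note that SQL Server chooses the order in which the rows are numbered:
CREATE TABLE #Demo (SomeValue varchar(10), Counter int)
INSERT INTO #Demo (SomeValue) VALUES ('A')
INSERT INTO #Demo (SomeValue) VALUES ('B')
INSERT INTO #Demo (SomeValue) VALUES ('C')
DECLARE
@intCounter int
SELECT
@intCounter = 0
----- Each row assigns the running value to both the variable and the column
UPDATE #Demo
SET
@intCounter = Counter = @intCounter + 1
SELECT * FROM #Demo ----- Counter now holds 1, 2, 3
DROP TABLE #Demo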
Friday, January 12, 2007
Enable and Disable Triggers
----- 1: Disable All Triggers
ALTER TABLE dbo.TableName
DISABLE TRIGGER ALL
----- 2: Enable All Triggers
ALTER TABLE dbo.TableName
ENABLE TRIGGER ALL
----- 3: Disable One Trigger
ALTER TABLE dbo.TableName
DISABLE TRIGGER TriggerName
----- 4: Enable One Trigger
ALTER TABLE dbo.TableName
ENABLE TRIGGER TriggerName
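To verify the change, OBJECTPROPERTY reports whether a given trigger is currently disabled (TriggerName is a placeholder for an existing trigger):
----- Returns 1 if the trigger is disabled, 0 if it is enabled
SELECT OBJECTPROPERTY(OBJECT_ID('TriggerName'), 'ExecIsTriggerDisabled') AS IsDisabled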
Index List for tables
SELECT
Object_Name(SI.ID) AS TableName
, SI.Name AS IndexName
FROM
sysindexes AS SI
WHERE
SI.IndID BETWEEN 1 AND 254
AND indexproperty(SI.ID, SI.Name, 'IsHypothetical') = 0
AND indexproperty(SI.ID, SI.Name, 'IsStatistics') = 0
AND indexproperty(SI.ID, SI.Name, 'IsAutoStatistics') = 0
----- For a particular Table
-- AND Object_Name(SI.ID) = 'TableName'
ORDER BY
TableName
, IndexName
----- Get list of Indexes and column detail
EXEC sp_helpindex TableName
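If you also want the key columns for every index in one result set, a sketch that joins sysindexes to sysindexkeys (SQL Server 2000 system tables) looks like this:
SELECT
Object_Name(SI.ID) AS TableName
, SI.Name AS IndexName
, COL_NAME(SK.ID, SK.ColID) AS ColumnName
FROM
sysindexes AS SI
INNER JOIN
sysindexkeys AS SK
ON
SI.ID = SK.ID
AND SI.IndID = SK.IndID
WHERE
SI.IndID BETWEEN 1 AND 254
AND indexproperty(SI.ID, SI.Name, 'IsHypothetical') = 0
AND indexproperty(SI.ID, SI.Name, 'IsStatistics') = 0
AND indexproperty(SI.ID, SI.Name, 'IsAutoStatistics') = 0
ORDER BY
TableName
, IndexName
, SK.KeyNo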
Thursday, January 11, 2007
Add and Drop Linked Server
1: Add Linked Server:
EXEC sp_addlinkedserver 'LinkedServerAlias', '', 'SQLOLEDB', 'DBServerName'
EXEC sp_addlinkedsrvlogin 'LinkedServerAlias', 'False', NULL, 'USERNAME', 'PASSWORD'
EXEC sp_serveroption 'LinkedServerAlias', 'data access', 'True'
EXEC sp_serveroption 'LinkedServerAlias', 'use remote collation', 'True'
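Once the alias is set up, it can be queried with four-part names (DatabaseName and dbo.TableName are placeholders):
SELECT TOP 10 *
FROM LinkedServerAlias.DatabaseName.dbo.TableName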
2: Drop Linked Server:
EXEC sp_dropserver 'LinkedServerAlias', 'DropLogins'