Friday, 12 December 2008

SQL Server 2000 FullText Search Service: How to set up and Query with Indexing Service
=======================================================================================
http://www.eggheadcafe.com/articles/20010422.asp

My Notes:
========
1): Create database 'HelpCenter' with these tables:
---------------------------------------------------
USE [HelpCenter]
GO
/****** Object: Table [dbo].[faq_node] Script Date: 12/12/2008 15:34:50 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
SET ANSI_PADDING ON
GO
CREATE TABLE [dbo].[faq_node](
[id] [int] IDENTITY(1,1) NOT NULL,
[parent_id] [int] NOT NULL,
[ref_id] [int] NULL,
[position] [int] NULL,
[type] [varchar](10) NOT NULL,
[name] [varchar](50) NOT NULL,
[description] [varchar](200) NULL,
[path] [varchar](100) NOT NULL,
[language] [varchar](10) NOT NULL,
[keyword] [varchar](50) NULL,
CONSTRAINT [PK_faq_node] PRIMARY KEY CLUSTERED
(
[id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]

GO
SET ANSI_PADDING OFF
GO
ALTER TABLE [dbo].[faq_node] WITH NOCHECK ADD CONSTRAINT [FK_faq_node_faq_node] FOREIGN KEY([parent_id])
REFERENCES [dbo].[faq_node] ([id])
GO
ALTER TABLE [dbo].[faq_node] CHECK CONSTRAINT [FK_faq_node_faq_node]
---------------------------------------------------------------------
USE [HelpCenter]
GO
/****** Object: Table [dbo].[faq_q_and_a] Script Date: 12/12/2008 15:35:57 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE TABLE [dbo].[faq_q_and_a](
[qa_id] [int] IDENTITY(1,1) NOT NULL,
[top] [int] NOT NULL,
[hot] [bit] NOT NULL,
[question] [text] NULL,
[answer] [text] NULL,
[hits] [int] NOT NULL CONSTRAINT [DF_faq_q_and_a_hits] DEFAULT (0),
CONSTRAINT [PK_faq_q_and_a] PRIMARY KEY CLUSTERED
(
[qa_id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
---------------------------------------------------------------------------
2): From Storage, create this catalog:
=====================================
USE [HelpCenter]
GO
/****** Object: FullTextCatalog [faq_q_and_a] Script Date: 12/12/2008 15:36:31 ******/
CREATE FULLTEXT CATALOG [faq_q_and_a]
IN PATH N'd:\mssql\MSSQL\FTData'
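
The faq_q_and_a_search procedure below uses CONTAINSTABLE, which needs a full-text index over the question and answer columns. A minimal sketch of creating that index against this catalog - SQL Server 2005 syntax to match the CREATE FULLTEXT CATALOG statement above; the language id (1033 = English) and the change-tracking mode are assumptions, not part of the original article:

CREATE FULLTEXT INDEX ON [dbo].[faq_q_and_a]
(
[question] LANGUAGE 1033, -- index the question text
[answer] LANGUAGE 1033    -- index the answer text
)
KEY INDEX [PK_faq_q_and_a] -- the unique key index from the table script above
ON [faq_q_and_a]           -- the catalog created in this step
WITH CHANGE_TRACKING AUTO  -- keep the index populated automatically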

----------------------------------
3): Add these stored procedures:
===============================
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_hot_lst_xml] Script Date: 12/12/2008 15:37:57 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_hot_lst_xml]
(
@language VARCHAR(10)
)
AS



SELECT TOP 1 *
FROM faq_q_and_a q_and_a INNER JOIN
faq_node n ON q_and_a.qa_id = n.ref_id
WHERE (n.[language] = @language)
AND hot = 1
ORDER BY [top] ASC
FOR XML AUTO
----------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_node_del] Script Date: 12/12/2008 15:38:27 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_node_del]
(
@node_id INT
)
AS
DELETE FROM faq_q_and_a
WHERE qa_id IN
(
SELECT ref_id
FROM faq_node
WHERE [id] = @node_id
)
DELETE FROM faq_node WHERE [id]= @node_id
-----------------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_node_flat_tree] Script Date: 12/12/2008 15:38:45 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER ON
GO

CREATE PROCEDURE [dbo].[faq_node_flat_tree]
(
@language VARCHAR(10) = NULL
)
AS
SELECT 1 AS Tag, NULL AS Parent,
node.[id] AS [node!1!id],
node.parent_id AS [node!1!parent_id],
node.[position] AS [node!1!position],
node.type AS [node!1!type],
node.language AS [node!1!language],
node.[name] AS [node!1!name],
node.[description] AS [node!1!description],
node.[path] AS [node!1!path]
FROM faq_node node LEFT OUTER JOIN
faq_q_and_a q_and_a ON node.ref_id = q_and_a.qa_id
WHERE (@language IS NULL OR language = @language)

--and id < 45
ORDER BY [node!1!parent_id], [node!1!id]
FOR XML EXPLICIT
-------------------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_node_ins] Script Date: 12/12/2008 15:39:01 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER ON
GO

CREATE PROCEDURE [dbo].[faq_node_ins]
(
@node_id INT OUTPUT,
@parent_id INT,
@type VARCHAR(10)
)
AS
DECLARE @top INT
DECLARE @name VARCHAR(50)
DECLARE @path VARCHAR(100)
DECLARE @language VARCHAR(10)




IF @type = 'q_and_a'
BEGIN
SET @top = (
SELECT ISNULL(MAX([position]), 0) + 1 AS position
FROM faq_node
WHERE (faq_node.parent_id =@parent_id) AND (type = 'q_and_a')
)
SET @name ='Question ' + CAST(@top AS VARCHAR(10))
END
ELSE
BEGIN
SET @top = (
SELECT ISNULL(COUNT(id), 0) + 1 AS position
FROM faq_node
WHERE (parent_id =@parent_id) AND (type = 'folder')
)
SET @name ='Category ' + CAST(@top AS VARCHAR(10))
END
INSERT INTO faq_node
(
parent_id, [position], type, [name], path, language
)
VALUES
(
@parent_id, @top, @type, @name,'',''
)
SET @node_id = @@IDENTITY

IF(@parent_id = 0)
BEGIN
SET @path = '/' + LTRIM(CAST(@node_id AS VARCHAR(10)))
END
ELSE
BEGIN
SET @path = (SELECT path FROM faq_node WHERE [id] = @parent_id)
SET @path = RTRIM(@path) + '/' + LTRIM(CAST(@node_id AS VARCHAR(10)))
END

SET @language = (SELECT language FROM faq_node WHERE [id] = @parent_id)


UPDATE faq_node SET path = @path, language = @language WHERE [id] = @node_id
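
A hypothetical call showing how the OUTPUT parameter returns the id of the new node (the parameter values are examples, not from the original article):

DECLARE @new_id INT
EXEC [dbo].[faq_node_ins]
@node_id = @new_id OUTPUT, -- receives the id of the inserted node
@parent_id = 0,            -- 0 inserts a root-level node
@type = 'folder'
SELECT @new_id AS new_node_id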
------------------------------------------------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_node_QuestionKeywordDetails] Script Date: 12/12/2008 15:39:19 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_node_QuestionKeywordDetails]
@Language varchar(5),
@questionKeyword varchar(50),
@path varchar(50) output,
@parent_id int output,
@ref_id int output
AS


SELECT TOP 1 @parent_id = parent_id, @path = path, @ref_id = ref_id
FROM faq_node
WHERE language = @Language AND keyword = @questionKeyword

------------------------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_node_upd] Script Date: 12/12/2008 15:39:34 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER ON
GO

CREATE PROCEDURE [dbo].[faq_node_upd]
(
@node_id INT,
@ref_id INT = NULL,
@position INT = NULL,
@parent_id INT,
@name VARCHAR(50),
@description VARCHAR(200) = NULL,
@language VARCHAR(10) = NULL,
@keyword VARCHAR(50) = NULL
)
AS
UPDATE faq_node SET [position] = @position, parent_id = @parent_id, [name] = @name, [description] = @description, ref_id = @ref_id , keyword = @keyword
WHERE id = @node_id

IF (@language IS NOT NULL)
BEGIN
UPDATE faq_node SET language= @language
WHERE id = @node_id
END
-----------------------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_q_and_a_get] Script Date: 12/12/2008 15:39:51 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_q_and_a_get]
(
@node_id INT
)
AS

SELECT id, parent_id, qa_id, position, name, description, question, answer, path, language, hot, [top], keyword
FROM faq_q_and_a q_and_a RIGHT OUTER JOIN faq_node node ON q_and_a.qa_id = node.ref_id
WHERE (node.id = @node_id)
--------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_q_and_a_get_xml] Script Date: 12/12/2008 15:40:03 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER ON
GO

CREATE PROCEDURE [dbo].[faq_q_and_a_get_xml]
(
@qa_id INT
)
AS
--SELECT *
--FROM faq_q_and_a q_and_a
--FOR XML AUTO
SELECT 1 AS Tag, NULL AS Parent,
q_and_a.qa_id AS [q_and_a!1!qa_id],
q_and_a.question AS [q_and_a!1!question!element],
q_and_a.answer AS [q_and_a!1!answer!element]
FROM faq_q_and_a q_and_a
WHERE (qa_id = @qa_id)
FOR XML EXPLICIT



UPDATE faq_q_and_a
SET Hits = Hits + 1
WHERE (qa_id = @qa_id)

------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_q_and_a_ins] Script Date: 12/12/2008 15:40:17 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER ON
GO

CREATE PROCEDURE [dbo].[faq_q_and_a_ins]
(
@qa_id INT OUTPUT,
@question TEXT,
@answer TEXT
)
AS
INSERT INTO faq_q_and_a (hot, [top], question, answer) VALUES (0, 0, @question, @answer)
SET @qa_id = @@IDENTITY
-----------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_q_and_a_lst] Script Date: 12/12/2008 15:40:28 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_q_and_a_lst]
(
@parent_id INT

)
AS
SELECT qa_id, parent_id, question
FROM faq_node node INNER JOIN
faq_q_and_a q_and_a ON node.ref_id = q_and_a.qa_id
WHERE (node.parent_id = @parent_id)
ORDER BY node.[position]

----------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_q_and_a_lst_all_descendents] Script Date: 12/12/2008 15:40:39 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO


CREATE PROCEDURE [dbo].[faq_q_and_a_lst_all_descendents]
(
@language VARCHAR(10),
@keyword VARCHAR(50)

)
AS



DECLARE @path VARCHAR(100)

SET @PATH = (SELECT TOP 1 path FROM faq_node WHERE language = @language AND keyword = @keyword)

SELECT TOP 1 qa_id, parent_id, question, path
FROM faq_node node INNER JOIN
faq_q_and_a q_and_a ON node.ref_id = q_and_a.qa_id
WHERE (node.path LIKE @path + '%')
ORDER BY NewID()
------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_q_and_a_lst_xml] Script Date: 12/12/2008 15:40:51 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_q_and_a_lst_xml]
(
@parent_id INT

)
AS
SELECT q_and_a.qa_id, node.[position], q_and_a.question, q_and_a.answer
FROM faq_node node INNER JOIN
faq_q_and_a q_and_a ON node.ref_id = q_and_a.qa_id
WHERE (node.parent_id = @parent_id)
ORDER BY node.[position]
FOR XML AUTO

------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_q_and_a_search] Script Date: 12/12/2008 15:41:07 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_q_and_a_search]
(
@search VARCHAR(500),
@language VARCHAR(500)
)
AS

SELECT q_and_a.qa_id, q_and_a.question, parent.name

FROM faq_q_and_a q_and_a INNER JOIN
CONTAINSTABLE(faq_q_and_a, *, @search, 100) KEY_TBL ON q_and_a.qa_id = KEY_TBL.[KEY] INNER JOIN
faq_node node ON q_and_a.qa_id = node.ref_id INNER JOIN
faq_node parent ON node.parent_id = parent.id
WHERE (node.[language] = @language)

ORDER BY rank DESC
FOR XML AUTO
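
A hypothetical call of the search procedure; the search condition and language value below are examples only (any valid CONTAINSTABLE search condition works):

EXEC [dbo].[faq_q_and_a_search]
@search = '"password" AND "reset"', -- CONTAINSTABLE search condition
@language = 'en'                    -- must match faq_node.language values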
--------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_q_and_a_upd] Script Date: 12/12/2008 15:41:17 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER ON
GO

CREATE PROCEDURE [dbo].[faq_q_and_a_upd]
(
@qa_id INT OUTPUT,
@top INT= 0,
@hot INT= 0,
@question TEXT,
@answer TEXT
)
AS

IF(@hot > 0)
BEGIN
DECLARE @language VARCHAR(10)

SET @language = (
SELECT TOP 1 n.[language]
FROM faq_q_and_a qa INNER JOIN faq_node n ON qa.qa_id = n.ref_id
WHERE (qa.qa_id = @qa_id)
)

UPDATE faq_q_and_a
SET hot = 0
FROM faq_q_and_a qa INNER JOIN
faq_node n ON qa.qa_id = n.ref_id
WHERE (n.[language] = @language)
END

UPDATE faq_q_and_a SET [top] = @top, hot = @hot, question = @question, answer = @answer
WHERE qa_id = @qa_id
--------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_top_10_del] Script Date: 12/12/2008 15:41:34 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_top_10_del]
(
@language VARCHAR(10)
)
AS

UPDATE faq_q_and_a
SET [top] = 0
FROM faq_q_and_a qa INNER JOIN
faq_node n ON qa.qa_id = n.ref_id
WHERE (n.[language] = @language)
----------------------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_top_10_ins] Script Date: 12/12/2008 15:41:47 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO

CREATE PROCEDURE [dbo].[faq_top_10_ins]
(
@qa_id INT,
@top INT
)
AS
UPDATE faq_q_and_a
SET [top]=@top
WHERE qa_id = @qa_id


----------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_top_10_lst] Script Date: 12/12/2008 15:41:58 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO
CREATE PROCEDURE [dbo].[faq_top_10_lst]
(
@language VARCHAR(10)
)
AS


SELECT TOP 10 [top], qa_id, question
FROM faq_q_and_a qa INNER JOIN
faq_node n ON qa.qa_id = n.ref_id
WHERE (n.[language] = @language)
AND [top] > 0

ORDER BY [top] ASC
-------------------------------------
USE [HelpCenter]
GO
/****** Object: StoredProcedure [dbo].[faq_top_10_lst_xml] Script Date: 12/12/2008 15:42:09 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER OFF
GO
CREATE PROCEDURE [dbo].[faq_top_10_lst_xml]
(
@language VARCHAR(10)
)
AS

SELECT TOP 10 *
FROM faq_q_and_a q_and_a INNER JOIN
faq_node n ON q_and_a.qa_id = n.ref_id
WHERE (n.[language] = @language)
AND [top] > 0
ORDER BY [top] ASC
FOR XML AUTO

8 Steps to better Transaction Log throughput - The Best Link

8 Steps to better Transaction Log throughput:
==============================================
http://sqlskills.com/blogs/Kimberly/post/8-Steps-to-better-Transaction-Log-throughput.aspx



On a few of my last trips, I've had the pleasure of doing a lot of customer visits - visiting some of the larger implementations of SQL Server. Many of these implementations are at banks where both performance and recovery are critical. After my trip to Turkey (where I did customer visits prior to presenting at the Microsoft Professional Developers Summit), I received this email:

As you might remember we talked at xxxbank, Turkey about performance problems in transaction log backups and splitting tempdb data file into equal size chunks equal to number of CPUs. I have implemented both optimizations in my troubled server, and the results are great! Transaction log backups do not impact the server at all and with lowered congestion in tempdb, overall system performance has gone up as well.

So - this is my first of two blog entries targeting these two VERY typical problems:
* Transaction log fragmentation (both internal and external - this customer was having problems with internal fragmentation)
* Optimizing TempDB

We'll start with 8 Steps to Optimizing your Transaction Log - not necessarily in any order. It's best to review all of these to make sure you have a comprehensive view of how to improve both performance and recovery of your transaction logs!

1) Try to dedicate the transaction log portion of your database to its own physical disk. In a high-volume OLTP system, isolating the transaction log can allow the disk head to be ready for the next write by not having other files contend for the physical disk resource. If your database already exists, the best way to "move" your transaction log is to detach your database (sp_detach_db) and then reattach it (sp_attach_db). Here's a great KB article titled: Moving SQL Server databases to a new location with Detach/Attach.
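
A minimal sketch of that detach/attach move, with hypothetical database and file names (not from the article); make sure no connections are active before detaching:

EXEC sp_detach_db 'HelpCenter'
-- move HelpCenter_log.ldf to its own physical disk, then:
EXEC sp_attach_db @dbname = 'HelpCenter',
@filename1 = 'D:\Data\HelpCenter.mdf',    -- primary data file
@filename2 = 'E:\Log\HelpCenter_log.ldf'  -- relocated transaction log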

2) Defrag the disk(s) on which your transaction logs reside. This will get rid of external fragmentation of the transaction log - better known as disk file fragmentation. This will require that your server be taken offline, but if your files have had a lot of autogrowth and/or they reside on a disk with a lot of other files that have been modified, then all of your files (incl. the transaction log file) are likely to be interleaved and fragmented. The good news is that this is a one-time operation (unless the files continue to grow and shrink excessively - but we'll fix that as well). The bad news is that you'll need to shut down SQL Server and then use some form of system tool - Microsoft or third party - to defrag. On Windows XP and Windows Server 2003 machines, the disk defragmenter tool can be launched by running: dfrg.msc.

3) Create only ONE transaction log file. Even though you can create multiple transaction log files, you only need one... SQL Server does NOT "stripe" across multiple transaction log files. Instead, SQL Server uses the transaction log files sequentially. While this might sound bad - it's not. If you want to get better performance out of the transaction log, place it on faster disks and/or a more performant disk (RAID) configuration. You could place the transaction log on a RAID 0 array, but you'll likely want fault tolerance as well as performance. Isolation, as per number 1, is a good start, but if you also need increased capacity then using hardware-based RAID to combine disks for both better performance and redundancy is your next choice. See step 4.

4) Not only should you try to isolate the transaction log to its own physical disk but you should make sure that the logical/physical disk configuration is as efficient as possible. Try to use an isolated RAID 1 mirroring set if you don't need significant capacity. If you need a greater capacity OR you want better performance, consider a combination of RAID 0 and RAID 1 (either RAID 0 + 1 or RAID 1 + 0). While RAID 0 + 1 can often offer better performance, RAID 1 + 0 offers better reliability. If you're new to RAID and are interested in learning more - check out the RAID Tutorial here: http://www.raidarray.com/04_01_00.html.

5) Don't get caught up in nothing but transaction log speed; you'll also want to make sure that your transaction log is always available, as this can help you in times of disaster. Even if the data is damaged, if the transaction log is available and you have a series of backups up to and including the last transaction log, then you can make a final backup of the transaction log that will represent all of the changes since your last transaction log backup. If this backup is possible (it's called backing up the "tail" of the log), then you can achieve up-to-the-minute recovery. This is only possible when the log is available. By placing the log on mirrored disks you can increase your chances of recovering data and minimize data loss!
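
A hedged sketch of backing up the "tail" of the log mentioned above; the database name and backup path are hypothetical:

BACKUP LOG HelpCenter
TO DISK = 'E:\Backup\HelpCenter_tail.trn'
WITH NO_TRUNCATE -- allows the backup even though the data files are damaged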

6) Create transaction log files with a reasonable initial size. When you create a database it's ideal to PRE-ALLOCATE both your data files and your transaction log file (a sketch follows this list). A little bit of capacity planning goes a long way... Now, if you think you've got absolutely no idea how large a transaction log you're going to need, well - here are the things that have the greatest impact:

Type of activity - transaction processing or decision support
Frequency of that activity - the more frequent the changes, the faster the transaction log will grow
Recovery Model - the recovery model of the database
Frequency of transaction log backups
Whether or not replication is used (since the log reader relies on the transaction log)
I wish I could give you a rough idea on sizing, but if you look only at database size and none of these other factors, you could end up with a transaction log that's seriously oversized or seriously undersized. I've seen recommendations of 10-25% of the size of the data, and you can use that, but I would also add a bit of common sense. A larger database with very frequent transaction log backups may not need a transaction log that's even 1% of the size of the data... The best way is to set up your development environment similar to that of your production environment (including backup jobs) and then see how the transaction log grows. If you have a lot of autogrowth (because your guess was wrong), you can later clean up the fragmentation that has occurred and get back to a reasonable, intact, and optimal transaction log. If you want to know more about Recovery Models and their impact on the transaction log, review a sample book chapter from an MSPress book (Microsoft® SQL Server™ 2000 High Availability) that I helped to co-author. I only helped out on a couple of chapters but this specific chapter explains a lot about the general database environment settings that impact recovery. You can download Chapter 9: Database Environment Basics for Recovery here: http://www.sqlskills.com/resources/SQLServerHAChapter9.pdf
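
A minimal pre-allocation sketch under assumed sizes and paths (everything here is hypothetical - size the files to your own environment):

-- pre-allocate the data and log files up front instead of relying on autogrowth
CREATE DATABASE Sales
ON PRIMARY ( NAME = Sales_data, FILENAME = 'D:\Data\Sales.mdf', SIZE = 2048MB )
LOG ON ( NAME = Sales_log, FILENAME = 'E:\Log\Sales_log.ldf', SIZE = 512MB )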

7): Don't let autogrowth get out of control. As important as capacity planning is, you're not likely to be spot-on in your estimates. I don't recommend completely turning off autogrowth, but in general I also don't like SQL Server 2000's default growth rate (or max size). In general, I would recommend setting ALL of the transaction log file's attributes: initial size, growth rate AND maximum size. For the growth rate, I recommend something that can be allocated somewhat quickly and something of a fixed size. In general, I recommend a value which is less than or equal to 1GB (based on total size) but large enough that you won't autogrow again soon. So, for databases whose transaction logs are under 1GB, you might set autogrow to somewhere between 20 and 100 MB. For those of you who have transaction logs measured in GB, I'd set the autogrowth to 500MB or 1GB. In general, I don't like the percentage option because it needs to be calculated (which I realize isn't really all that big of a deal), but the larger the file, the larger the autogrowth and the longer it takes. In SQL Server 2000, autogrowth can create blocking... so it's best to minimize this in general.
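
A hypothetical example of setting a fixed growth increment and an explicit maximum size (the database name, logical file name, and values are assumptions):

ALTER DATABASE HelpCenter
MODIFY FILE
(
NAME = HelpCenter_log, -- logical log file name; see sp_helpfile
FILEGROWTH = 100MB,    -- fixed increment instead of a percentage
MAXSIZE = 2048MB       -- explicit upper bound
)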

8) Check/fix your internal fragmentation. OK, so this is the one that really helped the customer in Turkey. Often, when transaction logs are not pre-allocated and/or when there have been a lot of autogrowths, the transaction log can become internally fragmented. Internally your transaction logs are broken down into smaller, more granular chunks called VLFs (Virtual Log Files). The size and number of VLFs you'll have depends largely on the size of the chunk when it's added to your transaction log. If you add a new chunk of 64MB or less (for example 20MB, through autogrowth or through manual growth) then the number of VLFs that are added is 4. If you add a chunk which is greater than 64MB but less than or equal to 1GB, you'll add 8 VLFs. If you add more than 1GB then you'll add 16 VLFs. In general, most transaction logs will only have 20 or 30 VLFs - even 50 could be reasonable depending on the total size of the transaction log. However, in many cases what happens is that excessive autogrowths can cause an excessive number of VLFs to be added - sometimes resulting in hundreds of VLFs. Having an excessive number of VLFs can negatively impact all transaction log related activities and you may even see degradation in performance when transaction log backups occur. To see how many VLFs you have, simply look at the number of rows returned by DBCC LOGINFO. The number of rows returned equals the number of VLFs your transaction log file has. If you have more than 50, I would recommend fixing it and adjusting your autogrowth so that it doesn't occur as frequently. To get rid of all of the excessive VLFs, follow these easy steps to shrink off the fragmented chunk and add a new, clean chunk to your transaction log:

1. Wait for an inactive time of day (ideally, it would be best to put the database into single user mode first) and then clear all transaction log activity through a regular transaction log backup. If you're using the simple recovery model then you don't need to do a log backup... Instead, just clear the transaction log by running a checkpoint.

BACKUP LOG databasename TO devicename

2. Shrink the log to as small a size as possible (truncateonly)

DBCC SHRINKFILE(transactionloglogicalfilename, TRUNCATEONLY)

NOTE: if you don't know the logical filename of your transaction log use sp_helpfile to list all of your database files.

3. Alter the database to modify the transaction log file to the appropriate size - in one step

ALTER DATABASE databasename
MODIFY FILE
(
NAME = transactionloglogicalfilename
, SIZE = newtotalsize
)
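
To verify the result, the DBCC LOGINFO check mentioned in step 8 can be rerun - it returns one row per VLF and should be executed in the context of the database you just shrank:

DBCC LOGINFO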

And, that should do it!

INF: Shrinking the Transaction Log in SQL Server 2000 with DBCC SHRINKFILE (DBCC SHRINKFILE and Defragmentation)

INF: Shrinking the Transaction Log in SQL Server 2000 with DBCC SHRINKFILE(Q272318):
-----------------------------------------------------------------------------------
http://support.microsoft.com/kb/272318

Shrinking the log in SQL Server 2000 is no longer a deferred operation. A shrink operation attempts to shrink the file immediately. However, in some circumstances it may be necessary to perform additional actions before the log file is shrunk to the desired size.

When DBCC SHRINKFILE is run, SQL Server 2000 shrinks the log file by removing as many virtual log files as it can to attempt to reach the target size. If the target file size is not reached, SQL Server places dummy log entries in the last virtual log file until the virtual log is filled and moves the head of the log to the beginning of the file. The following actions are then required to complete the shrinking of the transaction log:
You must run a BACKUP LOG statement to free up space by removing the inactive portion of the log.
You must run DBCC SHRINKFILE again with the desired target size until the log file shrinks to the target size.
The following example demonstrates this with the pubs database and attempts to shrink the pubs_log file to 2 MB:
Run this code:

DBCC SHRINKFILE(pubs_log, 2)


NOTE: If the target size is not reached, proceed to the next step.
Run this code if you want to truncate the transaction log and not keep a backup of the transaction log. Truncate_only invalidates your transaction log backup sequence. Take a full backup of your database after you perform backup log with truncate_only:

BACKUP LOG pubs WITH TRUNCATE_ONLY

-or-
Run this code if you want to keep a backup of your transaction log and keep your transaction log backup sequence intact. See SQL Server Books Online topic "BACKUP" for more information:
BACKUP LOG pubs TO pubslogbackup

Run this code:
DBCC SHRINKFILE(pubs_log,2)


The transaction log has now been shrunk to the target size.

How to monitor blocking in SQL Server 2005 and in SQL Server 2000 -- sp_blocker_pss80

How to monitor blocking in SQL Server 2005 and in SQL Server 2000 -- Microsoft document (sp_blocker_pss80):
-----------------------------------------------------------------------------------------------------------
http://support.microsoft.com/kb/271509/

Follow the steps in the linked article to create the sp_blocker_pss80 procedure, then use it to test for and resolve blocking problems.
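
A hedged sketch of the capture loop, assuming sp_blocker_pss80 has already been created in master from the article's script; the 15-second interval is just an example:

WHILE 1 = 1
BEGIN
EXEC master.dbo.sp_blocker_pss80
WAITFOR DELAY '00:00:15' -- capture a snapshot every 15 seconds
END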

Use DBCC OPENTRAN -- When -- Why

Check this important link from Microsoft:
=========================================
An incomplete transaction may hold a large number of locks and cause blocking:
------------------------------------------------------------------------
http://support.microsoft.com/kb/295108/

Check open transactions in the database:
========================================
1): Create table 'oldtransactions'
----------------------------------:
USE [master]
GO
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
SET ANSI_PADDING ON
GO
CREATE TABLE [dbo].[oldtransactions](
[db] [varchar](255) NULL,
[urenoud] [int] NULL,
[changed] [datetime] NULL
) ON [PRIMARY]

GO
SET ANSI_PADDING OFF
----------------------------
2): Create a job "CheckOpenTran" on the master database with this T-SQL job step:
--------------------------------------------------
-- declare the loop variables used by the cursor below
DECLARE @db varchar(255)
DECLARE @urenoud int -- age of the oldest active transaction, in minutes

DECLARE map_cursor CURSOR FOR
select name from master..sysdatabases
where not name in ('master','model','msdb','tempdb','distribution')
OPEN map_cursor
FETCH NEXT FROM map_cursor INTO @db
WHILE @@FETCH_STATUS = 0
BEGIN

-- capture DBCC OPENTRAN output for this database ('inhoud' = value)
create table #temp(field varchar(50),inhoud varchar(50))
insert into #temp
exec ('dbcc opentran('+@db+') with tableresults')
set @urenoud=0
select @urenoud=datediff(n, inhoud, getdate()) from #temp where field='OLDACT_STARTTIME'
drop table #temp
set @urenoud=isnull(@urenoud, 0)
insert oldtransactions(db, urenoud, changed) values (@db, @urenoud, getdate())
FETCH NEXT FROM map_cursor INTO @db
END
CLOSE map_cursor
DEALLOCATE map_cursor
------------------------
3): Schedule the job to run every 15 minutes throughout the day (Occurs every day, every 15 minute(s), between 12:00:00 AM and 11:59:59 PM. Schedule will be used starting on 1/31/2006.)

---------------------------------------------
4): Create Stored procedure 'CheckOpenTran':
--------------------------------------------
USE [master]
GO
/****** Object: StoredProcedure [dbo].[CheckOpenTran] Script Date: 12/12/2008 13:20:10 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE PROCEDURE [dbo].[CheckOpenTran](
@maxtime int=1
)
as
declare @db varchar(200)
declare @snapshotage int
declare @oldest int

declare @ret int
set @ret=0
declare @description varchar(200)
set @description=''

-- age (in minutes) of the newest row written by the CheckOpenTran job
select @snapshotage=max(datediff(n, changed, getdate())) from oldtransactions

if @snapshotage<17 -- the job runs every 15 minutes, so a fresh row should exist
begin
select top 1 @db=db, @oldest=urenoud from oldtransactions where urenoud>=@maxtime order by urenoud
set @oldest=isnull(@oldest, 0)
if @oldest>0
begin
set @ret=1
set @description=@description+@db+' (transaction '+ cast(@oldest as varchar(5)) + ' minutes old)'
end
else
begin
set @ret=0
set @description='Transactions OK'
end
end
else
begin
set @ret=1
set @description='The open-transaction SQL job is not running'
end

select ret=@ret, message=@description
---------------------------
5): Execute the 'CheckOpenTran' stored procedure whenever you need to check for open transactions.
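
A hypothetical call, flagging any transaction that has been open for 5 minutes or longer:

EXEC master.dbo.CheckOpenTran @maxtime = 5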

Reading SQL Trace Files using fn_trace_gettable function:
--------------------------------------------------------
http://kalpeshshirodker.wordpress.com/2008/02/15/reading-sql-trace-files-using-fn_trace_gettable-function/

Notes (important):
------------------
1): Run SQL Profiler to record the events you want to check.
2): Be sure the result is saved to a file on disk with the '.trc' extension.
3): After you stop recording, search the trace file using the following query:

SELECT Count(*) as CountOfEvents,
AVG(Duration) AS AvgDuration,
SUM(Duration) AS [SumDuration],
SUBSTRING(TextData, 1, 30) AS [Text Data]
FROM ::fn_trace_gettable('C:\Program Files\Microsoft SQL Server\MSSQL.1\MSSQL\LOG\log_41.trc',default)
WHERE EventClass in (10,12) -- 10 = RPC:Completed, 12 = SQL:BatchCompleted
GROUP BY SUBSTRING(TextData, 1, 30)
ORDER BY SUM(Duration) DESC

This returns a list of all queries that ran against the database during the recording, grouped by the first 30 characters of their text and ordered by total duration.