I'm populating a database table (SQL Server 2005 or 2008) from C#. I want to keep the database as small as possible, because GoDaddy only allows a 200 MB database and I want to get the most out of it. Should I compress my string data as I insert it, or is there a better way to keep the database size down with some form of compression?
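For reference, here is a minimal sketch (not from the original question or answers) of what compressing a string in C# before insertion could look like, assuming a hypothetical table dbo.Documents with a varbinary(max) column named Body; note that the compressed bytes are opaque to SQL Server, so the text can no longer be searched with LIKE or full-text queries:

// Hypothetical example: gzip a string and store it in a varbinary(max) column.
// The table dbo.Documents, the Body column and the helper names are assumptions for illustration.
using System.Data.SqlClient;
using System.IO;
using System.IO.Compression;
using System.Text;

static class CompressedInsertExample
{
    static byte[] CompressString(string text)
    {
        byte[] raw = Encoding.UTF8.GetBytes(text);
        using (var output = new MemoryStream())
        {
            // Dispose the GZipStream so the gzip footer is flushed;
            // MemoryStream.ToArray() still works after the stream is closed.
            using (var gzip = new GZipStream(output, CompressionMode.Compress))
            {
                gzip.Write(raw, 0, raw.Length);
            }
            return output.ToArray();
        }
    }

    public static void InsertCompressed(string connectionString, string text)
    {
        using (var connection = new SqlConnection(connectionString))
        using (var command = new SqlCommand(
            "INSERT INTO dbo.Documents (Body) VALUES (@body)", connection))
        {
            command.Parameters.AddWithValue("@body", CompressString(text));
            connection.Open();
            command.ExecuteNonQuery();
        }
    }
}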
Answer 0 (score: 2)
Once the strings are in the database, you also need to consider how you will use them. Will any of them need to be queried? If so, compression could make that very difficult/expensive.
Answer 1 (score: 0)
In most cases, compressing the string data will bring a benefit. You'd be better off doing the following:
For step 3, I use the following T-SQL script to compress the database (you can modify it as needed):
/*
Description: This stored procedure can compress a database or list out estimations for the savings from possible compression.
Create the stored procedure at the database level (in the database you want to compress).
Call:
USE AdventureWorksDW2014;
GO
EXEC dbo.sp_compress_database
@dbname = 'AdventureWorksDW2014', -- Database to compress
@compression_type = 'PAGE', -- Compression type: PAGE, ROW or NONE
@mode = 1; -- Mode can be: 1 -> compression and 0 -> estimation for compression
Author: Igor Micev
Date: 2012-10-25
*/
SET ANSI_NULLS ON;
GO
SET QUOTED_IDENTIFIER ON;
GO
ALTER PROCEDURE [dbo].[sp_compress_database]
(
@dbname NVARCHAR(100),
@compression_type VARCHAR(10),
@mode BIT
)
AS
BEGIN
SET NOCOUNT ON;
IF OBJECT_ID('tempdb..#tables_for_compression') IS NOT NULL DROP TABLE #tables_for_compression;
CREATE TABLE #tables_for_compression
(
[id] INT IDENTITY(1,1) NOT NULL PRIMARY KEY,
[object_name] VARCHAR(100),
[schema_name] VARCHAR(20),
[index_id] INT,
[partition_number] INT,
[size_before_compression_KB] INT,
[size_after_compression_KB] INT,
[sample_size_before_compression_KB] INT,
[sample_size_after_compression_KB] INT
);
IF @compression_type NOT IN('PAGE','ROW','NONE')
BEGIN
RAISERROR('Compression type is not valid.',16,1);
RETURN;
END;
DECLARE @dynamic_cmd NVARCHAR(1000);
DECLARE @tbl NVARCHAR(100);
DECLARE @schema NVARCHAR(20);
DECLARE tbl_cursor CURSOR FOR SELECT isc.TABLE_NAME,isc.TABLE_SCHEMA
FROM INFORMATION_SCHEMA.TABLES AS isc
WHERE isc.TABLE_CATALOG=@dbname AND isc.TABLE_TYPE='BASE TABLE';
OPEN tbl_cursor;
FETCH NEXT FROM tbl_cursor INTO @tbl,@schema;
--Examine the clustered indexes
WHILE @@fetch_status=0
BEGIN
IF @mode=1
BEGIN
SET @dynamic_cmd='USE ['+@dbname+'] ALTER TABLE ['+@schema+'].['+@tbl+']
REBUILD PARTITION = ALL WITH (ONLINE = ON, DATA_COMPRESSION = '+@compression_type+')';
END;
BEGIN TRY
IF @mode=0
BEGIN
SET @dynamic_cmd='EXEC sp_estimate_data_compression_savings '''+@schema+''', '''+@tbl+''', NULL, NULL,'''+@compression_type+'''';
INSERT INTO #tables_for_compression([object_name],[schema_name],index_id,partition_number,size_before_compression_KB,size_after_compression_KB,sample_size_before_compression_KB,sample_size_after_compression_KB)
EXEC sp_executesql @dynamic_cmd;
END;
IF @mode=1
BEGIN
EXEC sp_executesql @dynamic_cmd;
PRINT @schema+'.'+@tbl+' was compressed.';
END;
END TRY
BEGIN CATCH
PRINT 'Failed command: '+@dynamic_cmd+'. Err.Msg: '+ERROR_MESSAGE();
END CATCH;
FETCH NEXT FROM tbl_cursor INTO @tbl,@schema;
END;
CLOSE tbl_cursor;
DEALLOCATE tbl_cursor;
--Examine the nonclustered indexes. Exclude XML type indexes.
IF @mode=1
BEGIN
DECLARE @ind_name NVARCHAR(100);
DECLARE ncix CURSOR FOR SELECT ss.name AS [schema],OBJECT_NAME(ddips.object_id) AS table_name,si.name AS index_name
FROM sys.dm_db_index_physical_stats(DB_ID(),NULL,NULL,NULL,'SAMPLED') AS ddips
JOIN sys.indexes AS si ON ddips.index_id=si.index_id AND ddips.object_id=si.object_id
JOIN sys.tables AS st ON ddips.object_id=st.object_id
JOIN sys.schemas AS ss ON st.schema_id=ss.schema_id
WHERE si.index_id>1 AND si.[type]=2 AND ddips.page_count>64;
--Nonclustered indexes with more than 64 pages
OPEN ncix;
FETCH NEXT FROM ncix INTO @schema,@tbl,@ind_name;
WHILE(@@fetch_status=0)
BEGIN
SET @dynamic_cmd='ALTER INDEX ['+@ind_name+'] ON ['+@schema+'].['+@tbl+']
REBUILD WITH (ONLINE = ON, DATA_COMPRESSION = '+@compression_type+')';
BEGIN TRY
EXEC sp_executesql @dynamic_cmd;
PRINT 'Index '+@ind_name+' was compressed.';
END TRY
BEGIN CATCH
PRINT 'Index '+@ind_name+' cannot be compressed. Err.Msg: '+ERROR_MESSAGE();
END CATCH
FETCH NEXT FROM ncix INTO @schema,@tbl,@ind_name;
END;
CLOSE ncix;
DEALLOCATE ncix;
END
IF @mode=0
SELECT *
FROM #tables_for_compression;
IF OBJECT_ID('tempdb..#tables_for_compression') IS NOT NULL DROP TABLE #tables_for_compression;
END
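Since the question is about working from C#, here is a hedged sketch (again, not part of the original answer) of calling the procedure above with ADO.NET once it has been created in the target database; the connection string and database name below are placeholders:

using System.Data;
using System.Data.SqlClient;

class CompressDatabaseExample
{
    static void Main()
    {
        // Placeholder connection string; point it at the database that holds the procedure.
        const string connectionString = "Server=.;Database=MyDatabase;Integrated Security=true";

        using (var connection = new SqlConnection(connectionString))
        using (var command = new SqlCommand("dbo.sp_compress_database", connection))
        {
            command.CommandType = CommandType.StoredProcedure;
            command.CommandTimeout = 0; // Rebuilds can take a long time.
            command.Parameters.AddWithValue("@dbname", "MyDatabase");
            command.Parameters.AddWithValue("@compression_type", "PAGE");
            command.Parameters.AddWithValue("@mode", true); // true (1) = compress, false (0) = estimate only

            connection.Open();
            command.ExecuteNonQuery();
        }
    }
}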