2023年6月21日发布
【SQL Server 批量插入数据】在 SQL Server 中插入一条数据使用 Insert 语句,但是如果想要批量插入一堆数据的话,循环使用 Insert 不仅效率低,而且会导致 SQL Server 出现系统性能问题。下面介绍 SQL Server 支持的两种批量数据插入方法:Bulk(SqlBulkCopy)和表值参数(Table-Valued Parameters),高效插入数据。新建数据库:
--Create DataBase
-- Create DataBase (idempotent: only create when it does not already exist,
-- so the script can be re-run without error).
IF DB_ID(N'BulkTestDB') IS NULL
    CREATE DATABASE BulkTestDB;
GO
USE BulkTestDB;
GO
-- Create Table: target table shared by all three batch-insert demos
-- (row-by-row Insert, SqlBulkCopy, and table-valued parameters).
IF OBJECT_ID(N'dbo.BulkTestTable', N'U') IS NULL
    CREATE TABLE BulkTestTable (
        Id       int          NOT NULL PRIMARY KEY,  -- counter supplied by the client loop
        UserName nvarchar(32) NOT NULL,
        -- Demo column only: production code must store a salted hash, never a plain password.
        Pwd      varchar(16)  NOT NULL
    );
GO
一. 传统的INSERT方式
先看下传统的INSERT方式:一条一条的插入(性能消耗越来越大,速度越来越慢)。

// 使用简单的Insert方法一条条插入 [慢]
#region [ simpleInsert ]
static void simpleInsert()
{
    Console.WriteLine("使用简单的Insert方法一条条插入");
    Stopwatch sw = new Stopwatch();
    SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;");
    SqlCommand sqlcmd = new SqlCommand();
    sqlcmd.CommandText = "insert into BulkTestTable(Id,UserName,Pwd)values(@p0,@p1,@p2)";
    sqlcmd.Parameters.Add("@p0", SqlDbType.Int);
    sqlcmd.Parameters.Add("@p1", SqlDbType.NVarChar);
    sqlcmd.Parameters.Add("@p2", SqlDbType.VarChar);
    sqlcmd.CommandType = CommandType.Text;
    sqlcmd.Connection = sqlconn;
    sqlconn.Open();
    try
    {
        // 循环插入1000条数据,每次插入100条,插入10次。
        for (int multiply = 0; multiply < 10; multiply++)
        {
            for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
            {
                sqlcmd.Parameters["@p0"].Value = count;
                sqlcmd.Parameters["@p1"].Value = string.Format("User-{0}", count * multiply);
                sqlcmd.Parameters["@p2"].Value = string.Format("Pwd-{0}", count * multiply);
                sw.Start();
                sqlcmd.ExecuteNonQuery();
                sw.Stop();
            }
            // 每插入100条数据后,显示此次插入所用时间
            Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds));
        }
        Console.ReadKey();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
}
#endregion

循环插入1000条数据,每次插入100条,插入10次,效率是越来越慢。
二. 较快速的Bulk插入方式:使用SqlBulkCopy插入 [较快]

// 使用Bulk插入的情况 [较快]
#region [ 使用Bulk插入的情况 ]
static void BulkToDB(DataTable dt)
{
    Stopwatch sw = new Stopwatch();
    SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;");
    SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlconn);
    bulkCopy.DestinationTableName = "BulkTestTable";
    bulkCopy.BatchSize = dt.Rows.Count;
    try
    {
        sqlconn.Open();
        if (dt != null && dt.Rows.Count != 0)
        {
            bulkCopy.WriteToServer(dt);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
    finally
    {
        sqlconn.Close();
        if (bulkCopy != null)
        {
            bulkCopy.Close();
        }
    }
}

static DataTable GetTableSchema()
{
    DataTable dt = new DataTable();
    dt.Columns.AddRange(new DataColumn[]
    {
        new DataColumn("Id", typeof(int)),
        new DataColumn("UserName", typeof(string)),
        new DataColumn("Pwd", typeof(string))
    });
    return dt;
}

static void BulkInsert()
{
    Console.WriteLine("使用简单的Bulk插入的情况");
    Stopwatch sw = new Stopwatch();
    for (int multiply = 0; multiply < 10; multiply++)
    {
        DataTable dt = GetTableSchema();
        for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
        {
            DataRow r = dt.NewRow();
            r[0] = count;
            r[1] = string.Format("User-{0}", count * multiply);
            r[2] = string.Format("Pwd-{0}", count * multiply);
            dt.Rows.Add(r);
        }
        sw.Start();
        BulkToDB(dt);
        sw.Stop();
        Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds));
    }
}
#endregion

循环插入1000条数据,每次插入100条,插入10次,效率快了很多。
三. 使用表值参数(TVPs,Table-Valued Parameters)插入数据
打开 SQL Server,执行以下脚本:
--Create Table Valued
-- Table-valued parameter type consumed by the TVP insert demo
-- (passed as the @NewBulkTestTvp parameter with SqlDbType.Structured).
-- Columns mirror BulkTestTable so the INSERT ... SELECT maps one-to-one.
-- Idempotent: only create the type when it is not already defined.
IF TYPE_ID(N'dbo.BulkUdt') IS NULL
    CREATE TYPE BulkUdt AS TABLE
    (
        Id       int          NOT NULL,
        UserName nvarchar(32) NOT NULL,
        Pwd      varchar(16)  NOT NULL
    );
成功后,可在数据库的“可编程性 → 类型 → 用户定义表类型”节点下看到新增的 BulkUdt 表类型。
使用表值参数(TVPs)插入数据 [最快]

// 使用简称TVPs插入数据 [最快]
#region [ 使用简称TVPs插入数据 ]
static void TbaleValuedToDB(DataTable dt)   // 原文方法名即拼写为 TbaleValuedToDB
{
    Stopwatch sw = new Stopwatch();
    SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;");
    const string TSqlStatement =
        "insert into BulkTestTable (Id,UserName,Pwd)" +
        " SELECT nc.Id, nc.UserName, nc.Pwd" +
        " FROM @NewBulkTestTvp AS nc";
    SqlCommand cmd = new SqlCommand(TSqlStatement, sqlconn);
    SqlParameter catParam = cmd.Parameters.AddWithValue("@NewBulkTestTvp", dt);
    catParam.SqlDbType = SqlDbType.Structured;
    catParam.TypeName = "dbo.BulkUdt";
    try
    {
        sqlconn.Open();
        if (dt != null && dt.Rows.Count != 0)
        {
            cmd.ExecuteNonQuery();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("error>" + ex.Message);
    }
    finally
    {
        sqlconn.Close();
    }
}

static void TVPsInsert()
{
    Console.WriteLine("使用简称TVPs插入数据");
    Stopwatch sw = new Stopwatch();
    for (int multiply = 0; multiply < 10; multiply++)
    {
        DataTable dt = GetTableSchema();
        for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
        {
            DataRow r = dt.NewRow();
            r[0] = count;
            r[1] = string.Format("User-{0}", count * multiply);
            r[2] = string.Format("Pwd-{0}", count * multiply);
            dt.Rows.Add(r);
        }
        sw.Start();
        TbaleValuedToDB(dt);
        sw.Stop();
        Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds));
    }
    Console.ReadLine();
}
#endregion

循环插入1000条数据,每次插入100条,插入10次;后面测试中将每次插入的数据量增大,会更明显地体现 TVPs 插入的效率优势。
2023年6月21日发布
【SQL Server 批量插入数据】在 SQL Server 中插入一条数据使用 Insert 语句,但是如果想要批量插入一堆数据的话,循环使用 Insert 不仅效率低,而且会导致 SQL Server 出现系统性能问题。下面介绍 SQL Server 支持的两种批量数据插入方法:Bulk(SqlBulkCopy)和表值参数(Table-Valued Parameters),高效插入数据。新建数据库:
--Create DataBase
-- Create DataBase (idempotent: only create when it does not already exist,
-- so the script can be re-run without error).
IF DB_ID(N'BulkTestDB') IS NULL
    CREATE DATABASE BulkTestDB;
GO
USE BulkTestDB;
GO
-- Create Table: target table shared by all three batch-insert demos
-- (row-by-row Insert, SqlBulkCopy, and table-valued parameters).
IF OBJECT_ID(N'dbo.BulkTestTable', N'U') IS NULL
    CREATE TABLE BulkTestTable (
        Id       int          NOT NULL PRIMARY KEY,  -- counter supplied by the client loop
        UserName nvarchar(32) NOT NULL,
        -- Demo column only: production code must store a salted hash, never a plain password.
        Pwd      varchar(16)  NOT NULL
    );
GO
一. 传统的INSERT方式
先看下传统的INSERT方式:一条一条的插入(性能消耗越来越大,速度越来越慢)。

// 使用简单的Insert方法一条条插入 [慢]
#region [ simpleInsert ]
static void simpleInsert()
{
    Console.WriteLine("使用简单的Insert方法一条条插入");
    Stopwatch sw = new Stopwatch();
    SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;");
    SqlCommand sqlcmd = new SqlCommand();
    sqlcmd.CommandText = "insert into BulkTestTable(Id,UserName,Pwd)values(@p0,@p1,@p2)";
    sqlcmd.Parameters.Add("@p0", SqlDbType.Int);
    sqlcmd.Parameters.Add("@p1", SqlDbType.NVarChar);
    sqlcmd.Parameters.Add("@p2", SqlDbType.VarChar);
    sqlcmd.CommandType = CommandType.Text;
    sqlcmd.Connection = sqlconn;
    sqlconn.Open();
    try
    {
        // 循环插入1000条数据,每次插入100条,插入10次。
        for (int multiply = 0; multiply < 10; multiply++)
        {
            for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
            {
                sqlcmd.Parameters["@p0"].Value = count;
                sqlcmd.Parameters["@p1"].Value = string.Format("User-{0}", count * multiply);
                sqlcmd.Parameters["@p2"].Value = string.Format("Pwd-{0}", count * multiply);
                sw.Start();
                sqlcmd.ExecuteNonQuery();
                sw.Stop();
            }
            // 每插入100条数据后,显示此次插入所用时间
            Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds));
        }
        Console.ReadKey();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
}
#endregion

循环插入1000条数据,每次插入100条,插入10次,效率是越来越慢。
二. 较快速的Bulk插入方式:使用SqlBulkCopy插入 [较快]

// 使用Bulk插入的情况 [较快]
#region [ 使用Bulk插入的情况 ]
static void BulkToDB(DataTable dt)
{
    Stopwatch sw = new Stopwatch();
    SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;");
    SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlconn);
    bulkCopy.DestinationTableName = "BulkTestTable";
    bulkCopy.BatchSize = dt.Rows.Count;
    try
    {
        sqlconn.Open();
        if (dt != null && dt.Rows.Count != 0)
        {
            bulkCopy.WriteToServer(dt);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
    finally
    {
        sqlconn.Close();
        if (bulkCopy != null)
        {
            bulkCopy.Close();
        }
    }
}

static DataTable GetTableSchema()
{
    DataTable dt = new DataTable();
    dt.Columns.AddRange(new DataColumn[]
    {
        new DataColumn("Id", typeof(int)),
        new DataColumn("UserName", typeof(string)),
        new DataColumn("Pwd", typeof(string))
    });
    return dt;
}

static void BulkInsert()
{
    Console.WriteLine("使用简单的Bulk插入的情况");
    Stopwatch sw = new Stopwatch();
    for (int multiply = 0; multiply < 10; multiply++)
    {
        DataTable dt = GetTableSchema();
        for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
        {
            DataRow r = dt.NewRow();
            r[0] = count;
            r[1] = string.Format("User-{0}", count * multiply);
            r[2] = string.Format("Pwd-{0}", count * multiply);
            dt.Rows.Add(r);
        }
        sw.Start();
        BulkToDB(dt);
        sw.Stop();
        Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds));
    }
}
#endregion

循环插入1000条数据,每次插入100条,插入10次,效率快了很多。
三. 使用表值参数(TVPs,Table-Valued Parameters)插入数据
打开 SQL Server,执行以下脚本:
--Create Table Valued
-- Table-valued parameter type consumed by the TVP insert demo
-- (passed as the @NewBulkTestTvp parameter with SqlDbType.Structured).
-- Columns mirror BulkTestTable so the INSERT ... SELECT maps one-to-one.
-- Idempotent: only create the type when it is not already defined.
IF TYPE_ID(N'dbo.BulkUdt') IS NULL
    CREATE TYPE BulkUdt AS TABLE
    (
        Id       int          NOT NULL,
        UserName nvarchar(32) NOT NULL,
        Pwd      varchar(16)  NOT NULL
    );
成功后,可在数据库的“可编程性 → 类型 → 用户定义表类型”节点下看到新增的 BulkUdt 表类型。
使用表值参数(TVPs)插入数据 [最快]

// 使用简称TVPs插入数据 [最快]
#region [ 使用简称TVPs插入数据 ]
static void TbaleValuedToDB(DataTable dt)   // 原文方法名即拼写为 TbaleValuedToDB
{
    Stopwatch sw = new Stopwatch();
    SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;");
    const string TSqlStatement =
        "insert into BulkTestTable (Id,UserName,Pwd)" +
        " SELECT nc.Id, nc.UserName, nc.Pwd" +
        " FROM @NewBulkTestTvp AS nc";
    SqlCommand cmd = new SqlCommand(TSqlStatement, sqlconn);
    SqlParameter catParam = cmd.Parameters.AddWithValue("@NewBulkTestTvp", dt);
    catParam.SqlDbType = SqlDbType.Structured;
    catParam.TypeName = "dbo.BulkUdt";
    try
    {
        sqlconn.Open();
        if (dt != null && dt.Rows.Count != 0)
        {
            cmd.ExecuteNonQuery();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("error>" + ex.Message);
    }
    finally
    {
        sqlconn.Close();
    }
}

static void TVPsInsert()
{
    Console.WriteLine("使用简称TVPs插入数据");
    Stopwatch sw = new Stopwatch();
    for (int multiply = 0; multiply < 10; multiply++)
    {
        DataTable dt = GetTableSchema();
        for (int count = multiply * 100; count < (multiply + 1) * 100; count++)
        {
            DataRow r = dt.NewRow();
            r[0] = count;
            r[1] = string.Format("User-{0}", count * multiply);
            r[2] = string.Format("Pwd-{0}", count * multiply);
            dt.Rows.Add(r);
        }
        sw.Start();
        TbaleValuedToDB(dt);
        sw.Stop();
        Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds));
    }
    Console.ReadLine();
}
#endregion

循环插入1000条数据,每次插入100条,插入10次;后面测试中将每次插入的数据量增大,会更明显地体现 TVPs 插入的效率优势。
发布评论