SqlBulkCopy ColumnMapping error: CSV to DataTable to SQL Server table
I'm trying to bulk insert a CSV file into a SQL Server database. The process is: .CSV file -> DataTable -> SqlBulkCopy -> SQL Server.
When I run this, I get the following error:
The given ColumnMapping does not match up with any column in the source or destination
When I import the CSV directly into SQL Server through Management Studio it works, so I think the problem is my DataTable conversion.
Here is my code to get from the .CSV to the DataTable:
public DataTable CsvFileToDataTable(string filePath)
{
var csvData = new DataTable("Table1");
using (var csvReader = new TextFieldParser(filePath))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
var readFields = csvReader.ReadFields();
foreach (var dataColumn in readFields.Select(column => new DataColumn(column) {AllowDBNull = true }))
{
csvData.Columns.Add(dataColumn);
}
while (!csvReader.EndOfData)
{
var data = csvReader.ReadFields();
for (var i = 0; i < data.Length; i++)
{
if (data[i] == "")
{
data[i] = null;
}
}
csvData.Rows.Add(data);
}
}
return csvData;
}
Here is the code for the bulk copy insert:
public void InsertData(DataTable table)
{
using (var transactionScope = new TransactionScope())
{
using (var sqlConnection = new SqlConnection(this.ConnectionString))
{
sqlConnection.Open();
using (var sqlBulkCopy = new SqlBulkCopy(sqlConnection))
{
sqlBulkCopy.DestinationTableName = table.TableName;
foreach (var column in table.Columns)
{
sqlBulkCopy.ColumnMappings.Add(column.ToString(), column.ToString());
}
sqlBulkCopy.WriteToServer(table);
}
transactionScope.Complete();
}
}
}
Does anyone have any suggestions?
Thanks
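One quick way to narrow down this kind of ColumnMapping error is to dump the column names the DataTable actually ends up with and compare them against the destination table's columns; a stray space or an unexpected character in a CSV header is enough to break a name-based mapping. A minimal diagnostic sketch (the destination table name Table1 and the connectionString variable are placeholders, and SqlConnection/SqlCommand come from System.Data.SqlClient):
// Print the DataTable's column names exactly as SqlBulkCopy will see them.
foreach (DataColumn column in csvData.Columns)
{
    Console.WriteLine("'" + column.ColumnName + "'");
}
// List the destination table's columns from SQL Server for comparison.
using (var connection = new SqlConnection(connectionString))
using (var command = new SqlCommand(
    "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'Table1'", connection))
{
    connection.Open();
    using (var reader = command.ExecuteReader())
    {
        while (reader.Read())
        {
            Console.WriteLine("'" + reader.GetString(0) + "'");
        }
    }
}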
It turns out that if I adjust the foreach in the first method to:
foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
{
csvData.Columns.Add(dataColumn);
}
and in the second method change the mapping to use column indexes instead of column names:
for (var count = 0; count < table.Columns.Count; count++)
{
sqlBulkCopy.ColumnMappings.Add(count, count);
}
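Mapping by ordinal position sidesteps the name comparison entirely, but it only works when the CSV columns are in exactly the same order as the destination table. If the order is not guaranteed, a name-based mapping with trimmed headers is an alternative; the sketch below is not the code from the answer and assumes the destination column names match the trimmed CSV headers exactly:
// When building the DataTable, trim each header so "Name " in the CSV
// still produces a column called "Name".
foreach (var header in readFields)
{
    csvData.Columns.Add(new DataColumn(header.Trim(), typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty });
}
// Then map by the cleaned-up column names instead of by index.
foreach (DataColumn column in table.Columns)
{
    sqlBulkCopy.ColumnMappings.Add(column.ColumnName, column.ColumnName);
}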
I was able to use these code samples to build a working solution that reads the CSV, checks whether it contains any data, and if so clears the destination table and imports all of the CSV data into it:
private static void ProcessFile(string FilePath, string TableName)
{
var dt = GetDataTable(FilePath, TableName);
if (dt == null)
{
return;
}
if (dt.Rows.Count == 0)
{
AuditLog.AddInfo("No rows imported after reading file " + FilePath);
return;
}
ClearData(TableName);
InsertData(dt);
}
private static DataTable GetDataTable(string FilePath, string TableName)
{
var dt = new DataTable(TableName);
using (var csvReader = new TextFieldParser(FilePath))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
var readFields = csvReader.ReadFields();
if (readFields == null)
{
AuditLog.AddInfo("Could not read header fields for file " + FilePath);
return null;
}
foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
{
dt.Columns.Add(dataColumn);
}
while (!csvReader.EndOfData)
{
var data = csvReader.ReadFields();
if (data == null)
{
AuditLog.AddInfo(string.Format("Could not read fields on line {0} for file {1}", csvReader.LineNumber, FilePath));
continue;
}
var dr = dt.NewRow();
for (var i = 0; i < data.Length; i++)
{
if (!string.IsNullOrEmpty(data[i]))
{
dr[i] = data[i];
}
}
dt.Rows.Add(dr);
}
}
return dt;
}
private static void ClearData(string TableName)
{
SqlHelper.ExecuteNonQuery(ConfigurationUtil.ConnectionString, CommandType.Text, "TRUNCATE TABLE " + TableName);
}
private static void InsertData(DataTable table)
{
using (var sqlBulkCopy = new SqlBulkCopy(ConfigurationUtil.ConnectionString))
{
sqlBulkCopy.DestinationTableName = table.TableName;
for (var count = 0; count < table.Columns.Count; count++)
{
sqlBulkCopy.ColumnMappings.Add(count, count);
}
sqlBulkCopy.WriteToServer(table);
}
}
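For reference, the methods above rely on the usual namespaces: Microsoft.VisualBasic.FileIO for TextFieldParser, System.Data and System.Data.SqlClient for DataTable and SqlBulkCopy, and System.Linq for the Select call; SqlHelper, AuditLog and ConfigurationUtil appear to be the poster's own helper classes. A hypothetical call (the path and table name are placeholders) looks like:
// Import C:\imports\customers.csv into dbo.Customers, replacing its current contents.
ProcessFile(@"C:\imports\customers.csv", "dbo.Customers");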