我在使用CsvHelper将大量数据(> 2GB)保存到Azure Blob存储时遇到了问题:我得到的错误是"Stream太长"。有人能帮我解决吗?提前感谢!这是我的代码:
/// <summary>
/// Exports a DataTable as CSV to a blob named <paramref name="fileName"/> in the
/// "dataexport" container. Rows are streamed directly into the blob's upload stream,
/// so memory usage stays flat and exports larger than 2 GB no longer hit the
/// MemoryStream "Stream was too long" limit.
/// </summary>
/// <param name="dt">Source table; column names become the CSV header row.</param>
/// <param name="fileName">Name of the blob to create or overwrite.</param>
/// <param name="log">Logger for progress and error reporting.</param>
public static void EXPORT_CSV(DataTable dt, string fileName, ILogger log)
{
    try
    {
        // Retrieve storage account from connection string.
        var cnStorage = Environment.GetEnvironmentVariable("cnStorage");
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(cnStorage);
        // Create the blob client.
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        // Retrieve reference to a previously created container; create it on first use.
        CloudBlobContainer container = blobClient.GetContainerReference("dataexport");
        container.CreateIfNotExists();
        // Retrieve reference to the target blob.
        CloudBlockBlob blockBlob = container.GetBlockBlobReference(fileName);
        log.LogInformation($"C# BatchDataExportCSVsegnalazioni START UploadFromStream at: {DateTime.Now}");
        // Write straight to the blob's upload stream instead of buffering the whole
        // file in a MemoryStream (which caps out at 2 GB). Each record is pushed to
        // storage as it is produced.
        using (var blobStream = blockBlob.OpenWrite())
        using (var writer = new StreamWriter(blobStream))
        using (var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture))
        {
            csvWriter.Configuration.TypeConverterOptionsCache.GetOptions<DateTime>().Formats = new[] { "dd/MM/yyyy" };
            // Header row built from the column names.
            foreach (DataColumn column in dt.Columns)
            {
                csvWriter.WriteField(column.ColumnName);
            }
            csvWriter.NextRecord();
            // One CSV record per data row.
            foreach (DataRow row in dt.Rows)
            {
                for (var i = 0; i < dt.Columns.Count; i++)
                {
                    csvWriter.WriteField(row[i]);
                }
                csvWriter.NextRecord();
            }
            csvWriter.Flush();
        }
        log.LogInformation($"C# BatchDataExportCSVsegnalazioni END UploadFromStream at: {DateTime.Now}");
    }
    catch (Exception ex)
    {
        // Pass the exception object so the full stack trace is logged,
        // not just ex.Message.
        log.LogError(ex, "Error upload BatchDataExportCSVsegnalazioni");
    }
}
发布于 2021-09-16 11:25:55
我通过使用blob.OpenWriteAsync()直接写入Azure存储解决了这个问题:
/// <summary>
/// Asynchronously exports <paramref name="dt"/> as CSV to a blob named
/// <paramref name="fileName"/> in the "dataexport" container, streaming rows
/// directly to storage via <see cref="WriteDataTableToBlob"/> (no in-memory buffer,
/// so arbitrarily large tables are supported).
/// </summary>
/// <param name="dt">Source table to export.</param>
/// <param name="fileName">Name of the blob to create or overwrite.</param>
/// <param name="log">Logger for progress and error reporting.</param>
public static async Task UPLOAD_CSVAsync(DataTable dt, string fileName, ILogger log)
{
    try
    {
        // Retrieve storage account from connection string.
        var cnStorage = Environment.GetEnvironmentVariable("cnStorage");
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(cnStorage);
        // Create the blob client.
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        // Ensure the container exists; whether it was just created is irrelevant here.
        CloudBlobContainer container = blobClient.GetContainerReference("dataexport");
        container.CreateIfNotExists();
        // Retrieve reference to the target blob.
        CloudBlockBlob blockBlob = container.GetBlockBlobReference(fileName);
        log.LogInformation($"C# BatchExpCSVsegnalazioni START UploadFromStream at: {DateTime.Now}");
        await WriteDataTableToBlob(dt, blockBlob);
        log.LogInformation($"C# BatchExpCSVsegnalazioni END UploadFromStream at: {DateTime.Now}");
    }
    catch (Exception ex)
    {
        // Pass the exception object so the full stack trace is logged,
        // not just ex.Message.
        log.LogError(ex, "error upload BatchExpCSVsegnalazioni");
    }
}
/// <summary>
/// Streams the contents of <paramref name="dt"/> to <paramref name="blob"/> as CSV.
/// Writing through the blob's output stream keeps memory usage constant, which is
/// what makes exports larger than 2 GB possible (no MemoryStream, no 2 GB limit).
/// </summary>
/// <param name="dt">Source table; column names become the header row.</param>
/// <param name="blob">Target block blob; content is committed when the stream is disposed.</param>
public static async Task WriteDataTableToBlob(DataTable dt, CloudBlockBlob blob)
{
    using (var writer = await blob.OpenWriteAsync())
    using (var streamWriter = new StreamWriter(writer))
    using (var csvWriter = new CsvWriter(streamWriter, CultureInfo.InvariantCulture))
    {
        // Render every DateTime column as dd/MM/yyyy.
        csvWriter.Configuration.TypeConverterOptionsCache.GetOptions<DateTime>().Formats = new[] { "dd/MM/yyyy" };
        // Header row built from the column names.
        foreach (DataColumn column in dt.Columns)
        {
            csvWriter.WriteField(column.ColumnName);
        }
        await csvWriter.NextRecordAsync();
        // One CSV record per data row, streamed as it is produced.
        foreach (DataRow row in dt.Rows)
        {
            for (var i = 0; i < dt.Columns.Count; i++)
            {
                csvWriter.WriteField(row[i]);
            }
            await csvWriter.NextRecordAsync();
        }
        // Flush asynchronously so buffered data reaches the network-backed blob
        // stream before the using blocks dispose (Dispose flushes synchronously).
        await csvWriter.FlushAsync();
        await streamWriter.FlushAsync();
    }
}
发布于 2021-09-14 12:35:37
错误可能是由于对大数据使用MemoryStream造成的,而不是CsvHelper本身的问题。看看是否可以通过以下方式解决:
(或)
使用CloudBlockBlob在Azure存储中创建文件,或者使用程序集Azure.Storage.Blobs中命名空间Azure.Storage.Blobs.Specialized提供的扩展方法,例如:
var stream = blob.OpenWrite()
https://stackoverflow.com/questions/69127949
复制相似问题