I have a problem using CsvHelper to save a large amount of data (> 2 GB) to Azure Blob Storage: I get the error "Stream was too long". Can anyone help me solve it? Thanks in advance! This is my code:
public static void EXPORT_CSV(DataTable dt, string fileName, ILogger log)
{
    try
    {
        // Retrieve storage account from connection string.
        var cnStorage = Environment.GetEnvironmentVariable("cnStorage");
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(cnStorage);

        // Create the blob client.
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();

        // Retrieve reference to a previously created container.
        CloudBlobContainer container = blobClient.GetContainerReference("dataexport");
        bool exists = container.CreateIfNotExists();

        // Retrieve reference to a blob named "fileName".
        CloudBlockBlob blockBlob = container.GetBlockBlobReference(fileName);

        // The whole CSV is buffered here before upload.
        var stream = new MemoryStream();
        using (var writer = new StreamWriter(stream))
        using (var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture))
        {
            csvWriter.Configuration.TypeConverterOptionsCache.GetOptions<DateTime>().Formats = new[] { "dd/MM/yyyy" };

            foreach (DataColumn column in dt.Columns)
            {
                csvWriter.WriteField(column.ColumnName);
            }
            csvWriter.NextRecord();

            foreach (DataRow row in dt.Rows)
            {
                for (var i = 0; i < dt.Columns.Count; i++)
                {
                    csvWriter.WriteField(row[i]);
                }
                csvWriter.NextRecord();
            }

            csvWriter.Flush();
            writer.Flush();
            stream.Position = 0;

            log.LogInformation($"C# BatchDataExportCSVsegnalazioni START UploadFromStream at: {DateTime.Now}");
            blockBlob.UploadFromStream(stream);
            log.LogInformation($"C# BatchDataExportCSVsegnalazioni END UploadFromStream at: {DateTime.Now}");
        }
    }
    catch (Exception ex)
    {
        log.LogError("Error upload BatchDataExportCSVsegnalazioni: " + ex.Message);
    }
}
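The root cause turned out to be the MemoryStream, not CsvHelper or the storage SDK: a MemoryStream is backed by a single byte array, so it can never hold more than Int32.MaxValue (~2 GB) bytes, and writing past that limit throws "Stream was too long". If you want to see the exception in isolation, here is a minimal sketch (no Azure involved; seeking near the cap avoids actually allocating 2 GB):

using System;
using System.IO;

class MemoryStreamLimitDemo
{
    static void Main()
    {
        using (var stream = new MemoryStream())
        {
            // A MemoryStream is backed by one byte[]; Position + count may
            // never exceed Int32.MaxValue, regardless of available RAM.
            stream.Position = int.MaxValue - 1;
            var buffer = new byte[2];
            try
            {
                stream.Write(buffer, 0, buffer.Length); // overflows the 2 GB cap
            }
            catch (IOException ex)
            {
                Console.WriteLine(ex.Message); // "Stream was too long."
            }
        }
    }
}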
I solved it by writing directly to Azure Blob Storage using blob.OpenWriteAsync(): the stream it returns uploads data to the blob in blocks as it is written, so the CSV never has to fit in memory:
public static async Task UPLOAD_CSVAsync(DataTable dt, string fileName, ILogger log)
{
    try
    {
        // Retrieve storage account from connection string.
        var cnStorage = Environment.GetEnvironmentVariable("cnStorage");
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(cnStorage);

        // Create the blob client.
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();

        // Retrieve reference to a previously created container.
        CloudBlobContainer container = blobClient.GetContainerReference("dataexport");
        await container.CreateIfNotExistsAsync();

        // Retrieve reference to a blob named "fileName".
        CloudBlockBlob blockBlob = container.GetBlockBlobReference(fileName);

        log.LogInformation($"C# BatchExpCSVsegnalazioni START blob write at: {DateTime.Now}");
        await WriteDataTableToBlob(dt, blockBlob);
        log.LogInformation($"C# BatchExpCSVsegnalazioni END blob write at: {DateTime.Now}");
    }
    catch (Exception ex)
    {
        log.LogError("Error upload BatchExpCSVsegnalazioni: " + ex.Message);
    }
}
public static async Task WriteDataTableToBlob(DataTable dt, CloudBlockBlob blob)
{
    // OpenWriteAsync returns a stream that uploads blocks to the blob as it
    // is written to, so memory use stays flat regardless of file size.
    using (var blobStream = await blob.OpenWriteAsync())
    using (var streamWriter = new StreamWriter(blobStream))
    using (var csvWriter = new CsvWriter(streamWriter, CultureInfo.InvariantCulture))
    {
        csvWriter.Configuration.TypeConverterOptionsCache.GetOptions<DateTime>().Formats = new[] { "dd/MM/yyyy" };

        foreach (DataColumn column in dt.Columns)
        {
            csvWriter.WriteField(column.ColumnName);
        }
        csvWriter.NextRecord();

        foreach (DataRow row in dt.Rows)
        {
            for (var i = 0; i < dt.Columns.Count; i++)
            {
                csvWriter.WriteField(row[i]);
            }
            csvWriter.NextRecord();
        }

        csvWriter.Flush();
    }
}
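Side note: the WindowsAzure.Storage package used above is now deprecated in favour of Azure.Storage.Blobs, where the same streaming approach is available through BlockBlobClient.OpenWriteAsync. A minimal sketch of the equivalent export, assuming the same cnStorage app setting and dataexport container (and CsvHelper v20+, which moved TypeConverterOptionsCache from Configuration to Context):

using System;
using System.Data;
using System.Globalization;
using System.IO;
using System.Threading.Tasks;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Specialized;
using CsvHelper;

public static class BlobCsvExport
{
    public static async Task WriteDataTableToBlobAsync(DataTable dt, string fileName)
    {
        var cnStorage = Environment.GetEnvironmentVariable("cnStorage");
        var container = new BlobContainerClient(cnStorage, "dataexport");
        await container.CreateIfNotExistsAsync();

        // BlockBlobClient.OpenWriteAsync streams the upload in blocks,
        // so the CSV is never held in memory as a whole.
        BlockBlobClient blockBlob = container.GetBlockBlobClient(fileName);

        using (Stream blobStream = await blockBlob.OpenWriteAsync(overwrite: true))
        using (var streamWriter = new StreamWriter(blobStream))
        using (var csvWriter = new CsvWriter(streamWriter, CultureInfo.InvariantCulture))
        {
            // CsvHelper v20+ exposes the options cache on Context.
            csvWriter.Context.TypeConverterOptionsCache.GetOptions<DateTime>().Formats = new[] { "dd/MM/yyyy" };

            foreach (DataColumn column in dt.Columns)
            {
                csvWriter.WriteField(column.ColumnName);
            }
            csvWriter.NextRecord();

            foreach (DataRow row in dt.Rows)
            {
                for (var i = 0; i < dt.Columns.Count; i++)
                {
                    csvWriter.WriteField(row[i]);
                }
                csvWriter.NextRecord();
            }
        }
    }
}

Either way, the write stream splits the upload into blocks (StreamWriteSizeInBytes, 4 MB by default, in the old SDK; BlockBlobOpenWriteOptions.BufferSize in the new one), and a block blob allows up to 50,000 committed blocks, so the defaults comfortably cover multi-GB exports.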