
C#: importing a large ASCII file to SQL Server and breaking it down


I'm currently tasked with importing 8 million rows from an ASCII file into a SQL Server database.

I've already formatted the rows as entities ready for manipulation, but I can't find a way to break this enormous file into multiple DataTables to use with SqlBulkCopy...

Would you have any idea how to proceed? I'd like to use this solution, Process large file in chunks or not, but I don't know where to begin breaking my 1 GB file down...

Thanks for your help.


Solution

  • I found the solution; for anyone who runs into the same issue: read each line and add it to a DataTable. Once the DataTable reaches the batch size, send it with SqlBulkCopy and clear it for the next batch. At the end, send whatever is left, even if the DataTable hasn't reached the batch size:

        // Requires: using System; using System.Data; using System.Data.SqlClient; using System.IO;
        const int batchSize = 10000;
        string line;

        // Staging table that accumulates one batch of rows
        DataTable dt = new DataTable();
        dt.Columns.Add("dfdfdf");
        dt.Columns.Add("fdfdf");

        using (FileStream stream = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (BufferedStream bufferedStream = new BufferedStream(stream))
        using (StreamReader streamReader = new StreamReader(bufferedStream))
        {
            string connectionString = @"connectionstring";
            using (SqlConnection conn = new SqlConnection(connectionString))
            {
                conn.Open();

                while ((line = streamReader.ReadLine()) != null)
                {
                    // Extract the fixed-width fields from the current line
                    string dfdfdf = line.Substring(42, 1);
                    string fdfdf = line.Substring(45, 1);

                    DataRow row = dt.NewRow();
                    row["dfdfdf"] = dfdfdf;
                    row["fdfdf"] = fdfdf;
                    dt.Rows.Add(row);

                    // Flush to SQL Server every time the batch size is reached
                    if (dt.Rows.Count == batchSize)
                    {
                        try
                        {
                            Console.WriteLine("Batch sent");
                            using (SqlBulkCopy bulkCopy = new SqlBulkCopy(conn))
                            {
                                bulkCopy.ColumnMappings.Add("dfdfdf", "dfdfdf");
                                bulkCopy.ColumnMappings.Add("fdfdf", "fdfdf");
                                bulkCopy.DestinationTableName = "table";
                                bulkCopy.WriteToServer(dt);
                            }
                            dt.Clear();
                        }
                        catch (Exception e)
                        {
                            Console.WriteLine(e);
                        }
                    }
                }

                // Send whatever is left once the whole file has been read
                if (dt.Rows.Count > 0)
                {
                    try
                    {
                        Console.WriteLine("Last batch sent");
                        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(conn))
                        {
                            bulkCopy.ColumnMappings.Add("dfdfdf", "dfdfdf");
                            bulkCopy.ColumnMappings.Add("fdfdf", "fdfdf");
                            bulkCopy.DestinationTableName = "table";
                            bulkCopy.WriteToServer(dt);
                        }
                        dt.Clear();
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e);
                    }
                }
            }
        }
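
    As a possible refinement (a sketch, not part of the original answer): SqlBulkCopy can be created once and reused for every flush, which avoids re-adding the column mappings on each batch, and its built-in BatchSize and BulkCopyTimeout properties handle server-side batching and long-running imports. The placeholder names below (file, "table", dfdfdf, fdfdf, and the substring offsets) are carried over from the answer above:

        // Variation: one SqlBulkCopy reused for the whole import.
        // Requires: using System.Data; using System.Data.SqlClient; using System.IO;
        using (SqlConnection conn = new SqlConnection(@"connectionstring"))
        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(conn))
        using (StreamReader streamReader = new StreamReader(file))
        {
            conn.Open();
            bulkCopy.DestinationTableName = "table";
            bulkCopy.BatchSize = 10000;       // let SqlBulkCopy commit in batches internally
            bulkCopy.BulkCopyTimeout = 0;     // no timeout for a large import
            bulkCopy.ColumnMappings.Add("dfdfdf", "dfdfdf");
            bulkCopy.ColumnMappings.Add("fdfdf", "fdfdf");

            DataTable dt = new DataTable();
            dt.Columns.Add("dfdfdf");
            dt.Columns.Add("fdfdf");

            string line;
            while ((line = streamReader.ReadLine()) != null)
            {
                DataRow row = dt.NewRow();
                row["dfdfdf"] = line.Substring(42, 1);
                row["fdfdf"] = line.Substring(45, 1);
                dt.Rows.Add(row);

                if (dt.Rows.Count == 10000)
                {
                    bulkCopy.WriteToServer(dt);   // reuse the same copier for each batch
                    dt.Clear();
                }
            }

            if (dt.Rows.Count > 0)
                bulkCopy.WriteToServer(dt);       // flush the remainder
        }

    Whether reusing one SqlBulkCopy is noticeably faster for 8 million rows would need measuring, but it keeps the setup (mappings, destination table, timeouts) in one place.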