I have seen many solutions to this problem where people just use Command.ExecuteScalar() as byte[];, but those SQL queries only fetch a single varbinary field at a time. I am trying to select about 30k rows of varbinary entries, put each one into a byte[], and deserialize it.
Here is my code:
public void MNAdapter()
{
    IsoStorage retVal = new IsoStorage();

    SqlConnectionStringBuilder csb = new SqlConnectionStringBuilder();
    csb.DataSource = @"LocalMachine\SQLDEV";
    csb.InitialCatalog = "Support";
    csb.IntegratedSecurity = true;
    string connString = csb.ToString();

    using (SqlConnection conn = new SqlConnection(connString))
    {
        conn.Open();

        SqlCommand command = conn.CreateCommand();
        command.CommandText = @"SELECT S.Settings
            from Support.dbo.SavedLocalSettings S
            inner join WebCatalog.Published.People P
              on P.PKey = S.PeopleLink
            inner join WebCatalog.Published.Company C
              on P.Link = C.PeopleList
            where S.DateSaved >= GETDATE()-34
              and C.PKey != '530F4622-C30D-DD11-A23A-00304834A8C9'
              and C.PKey != '7BAF7229-9249-449E-BEA5-4B366D7ECCD1'
              and C.PKey != 'CCBB2140-C30D-DD11-A23A-00304834A8C9'
              and S.CompanyName not like 'Tech Support%'
            Group By S.PeopleLink, S.Settings";

        using (SqlDataReader reader = command.ExecuteReader(CommandBehavior.SequentialAccess))
        {
            //DataTable dt = new DataTable();
            //dt.Load(reader);
            byte[] blob = null;
            BinaryFormatter bf = new BinaryFormatter();
            bf.Binder = new CustomBinder();

            while (reader.Read())
            {
                reader.GetBytes(0, 0, blob, 0, 100000);
                Console.WriteLine(blob.ToString());
                retVal = bf.Deserialize(new MemoryStream(blob)) as IsoStorage;
            }
        }
    }
}
I also tried putting the results into a DataTable first, even though I thought that would be redundant, but the values get read in as integers.
I don't get any errors and the data is going into the data reader, but it's as if reader.GetBytes(0, 0, blob, 0, 100000); never runs, because blob stays null.
Why not just use:
blob = (byte[])reader["Settings"];
or
blob = (byte[])reader.GetValue(0);