roger_27
I've got a problem. I load a huge datatable with 111 columns. This
datatable throws a System.OutOfMemoryException if it's bigger than about
280,000 rows.
That's fine. I've come to accept that.
When I fill a datatable up to near its max limit of 280k rows, the
program's memory usage goes from about 100 MB to 1 GB!
That's also fine. I don't really have much choice there.
But then I go through a loop like this:
//dtResults is the HUGE datatable.
//I clone the datatable a few times. this makes new datatables
//with the same columns.
DataTable dtRejectedResults = new DataTable();
dtRejectedResults = dtResults.Clone();
DataTable dtExcludedResults = new DataTable();
dtExcludedResults = dtResults.Clone();
DataTable dtExcludedResults2 = new DataTable();
dtExcludedResults2 = dtResults.Clone();
DataTable dtExcludedResults3 = new DataTable();
dtExcludedResults3 = dtResults.Clone();
DataTable dtREGS = new DataTable();
dtREGS = dtResults.Clone();
DataTable dtREGSNCOABad = new DataTable();
dtREGSNCOABad = dtResults.Clone();
//now I loop through the HUGE datatable, and place the rows in separate,
//smaller datatables.
//***this is where the memory usage jumps from 1 gig, to 1.3 gigs.
//***why does it grow 300 MB in here????
for (int k = 0; k < dtResults.Rows.Count; k++)
{
    if (dtResults.Rows[k]["Flag"].ToString().TrimEnd(' ') == "C" ||
        dtResults.Rows[k]["Flag"].ToString().TrimEnd(' ') == "F" ||
        dtResults.Rows[k]["Flag"].ToString().TrimEnd(' ') == "G" ||
        dtResults.Rows[k]["Flag"].ToString().TrimEnd(' ') == "K")
    {
        DataRow row3 = dtREGSNCOABad.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtREGSNCOABad.Rows.Add(row3);
    }
    else if (dtResults.Rows[k]["PACKAGE"].ToString().TrimEnd(' ') == "-1")
    {
        DataRow row3 = dtRejectedResults.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtRejectedResults.Rows.Add(row3);
    }
    else if (dtResults.Rows[k]["PACKAGE"].ToString().TrimEnd(' ') == "-2")
    {
        DataRow row3 = dtExcludedResults.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtExcludedResults.Rows.Add(row3);
    }
    else if (dtResults.Rows[k]["PACKAGE"].ToString().TrimEnd(' ') == "-3")
    {
        DataRow row3 = dtExcludedResults2.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtExcludedResults2.Rows.Add(row3);
    }
    else if (dtResults.Rows[k]["PACKAGE"].ToString().TrimEnd(' ') == "-4")
    {
        DataRow row3 = dtExcludedResults3.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtExcludedResults3.Rows.Add(row3);
    }
    else
    {
        DataRow row3 = dtREGS.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtREGS.Rows.Add(row3);
    }
}
//this doesn't free up anything.
dtResults.Clear();
So I tried removing rows as they are being added, like this:
//dtResults is the HUGE datatable.
//I clone the datatable a few times. this makes new datatables
//with the same columns.
DataTable dtRejectedResults = new DataTable();
dtRejectedResults = dtResults.Clone();
DataTable dtExcludedResults = new DataTable();
dtExcludedResults = dtResults.Clone();
DataTable dtExcludedResults2 = new DataTable();
dtExcludedResults2 = dtResults.Clone();
DataTable dtExcludedResults3 = new DataTable();
dtExcludedResults3 = dtResults.Clone();
DataTable dtREGS = new DataTable();
dtREGS = dtResults.Clone();
DataTable dtREGSNCOABad = new DataTable();
dtREGSNCOABad = dtResults.Clone();
//now I loop through the HUGE datatable, and place the rows in separate,
//smaller datatables based on certain columns. just a bunch of ifs.
//***this is where the memory usage jumps from 1 gig, to 1.3 gigs.
//***why does it grow 300 MB in this loop ?????
for (int k = 0; k < dtResults.Rows.Count; k++)
{
    if (dtResults.Rows[k]["Flag"].ToString().TrimEnd(' ') == "C" ||
        dtResults.Rows[k]["Flag"].ToString().TrimEnd(' ') == "F" ||
        dtResults.Rows[k]["Flag"].ToString().TrimEnd(' ') == "G" ||
        dtResults.Rows[k]["Flag"].ToString().TrimEnd(' ') == "K")
    {
        DataRow row3 = dtREGSNCOABad.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtREGSNCOABad.Rows.Add(row3);
    }
    else if (dtResults.Rows[k]["PACKAGE"].ToString().TrimEnd(' ') == "-1")
    {
        DataRow row3 = dtRejectedResults.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtRejectedResults.Rows.Add(row3);
    }
    else if (dtResults.Rows[k]["PACKAGE"].ToString().TrimEnd(' ') == "-2")
    {
        DataRow row3 = dtExcludedResults.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtExcludedResults.Rows.Add(row3);
    }
    else if (dtResults.Rows[k]["PACKAGE"].ToString().TrimEnd(' ') == "-3")
    {
        DataRow row3 = dtExcludedResults2.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtExcludedResults2.Rows.Add(row3);
    }
    else if (dtResults.Rows[k]["PACKAGE"].ToString().TrimEnd(' ') == "-4")
    {
        DataRow row3 = dtExcludedResults3.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtExcludedResults3.Rows.Add(row3);
    }
    else
    {
        DataRow row3 = dtREGS.NewRow();
        row3.ItemArray = dtResults.Rows[k].ItemArray;
        dtREGS.Rows.Add(row3);
    }
    //***added this here
    dtResults.Rows.Remove(dtResults.Rows[k]);
    k = k - 1;
}
//memory usage doesn't change.
Are there any ideas, or tips, or anything anyone can give me to make
this more memory-efficient?
I know what I'm doing that makes the memory grow: I'm taking a huge
datatable, looping through all of it, making copies of those EXACT rows,
and putting them into new datatables.
So yeah, memory usage would grow, but then I call .Clear() on it and it
stays the same. I've tried .Dispose() and GC.Collect(); nothing seems to
change it.
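This is roughly what I mean by trying .Dispose() and GC.Collect(). I'm
working from memory, so it may not match my code exactly, but none of it
brings the reported memory back down:
//attempted cleanup after the splitting loop
dtResults.Clear();
dtResults.Dispose();
GC.Collect();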
So is there some other way I can do this that uses less memory?
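(Just to show the kind of thing I'm wondering about: I don't know whether
pointing filtered views at the one big table, instead of copying rows into
new tables, would actually help with memory, and I'm assuming Flag and
PACKAGE are string columns, but something like this?)
//speculative, untried sketch: keep only the one big table and use
//filtered views instead of copying every row's ItemArray
DataView dvREGSNCOABad = new DataView(dtResults,
    "Trim(Flag) IN ('C','F','G','K')", "", DataViewRowState.CurrentRows);
DataView dvRejectedResults = new DataView(dtResults,
    "Trim(PACKAGE) = '-1'", "", DataViewRowState.CurrentRows);
//...and the same idea for the -2, -3, -4, and "everything else" cases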
thanks.
Roger.