This is an implementation of the Naive Bayes classifier algorithm. I can't understand the line score.Add(results[i].Name, finalScore * 0.5);

Where does the value 0.5 come from? Why 0.5 and not any other value?
public string Classify(double[] obj)
{
    // One score per class name.
    Dictionary<string, double> score = new Dictionary<string, double>();

    // One entry per class: the class label (first column) and its row count.
    var results = (from myRow in dataSet.Tables[0].AsEnumerable()
                   group myRow by myRow.Field<string>(
                       dataSet.Tables[0].Columns[0].ColumnName) into g
                   select new { Name = g.Key, Count = g.Count() }).ToList();

    for (int i = 0; i < results.Count; i++)
    {
        List<double> subScoreList = new List<double>();
        int a = 1, b = 1;

        // The "Gaussian" table stores a (mean, variance) pair per feature,
        // so walk its columns two at a time.
        for (int k = 1; k < dataSet.Tables["Gaussian"].Columns.Count; k += 2)
        {
            double mean = Convert.ToDouble(dataSet.Tables["Gaussian"].Rows[i][a]);
            double variance = Convert.ToDouble(dataSet.Tables["Gaussian"].Rows[i][++a]);

            // Likelihood of feature b-1 under this class's Gaussian
            // (SquareRoot(variance) is the standard deviation).
            double result = Helper.NormalDist(obj[b - 1], mean, Helper.SquareRoot(variance));
            subScoreList.Add(result);
            a++; b++;
        }

        // Multiply the per-feature likelihoods together
        // (the "naive" conditional-independence assumption).
        double finalScore = 1;
        for (int z = 0; z < subScoreList.Count; z++)
        {
            finalScore *= subScoreList[z];
        }

        score.Add(results[i].Name, finalScore * 0.5);
    }

    // Return the class with the highest score.
    double maxOne = score.Max(c => c.Value);
    var name = (from c in score
                where c.Value == maxOne
                select c.Key).First();
    return name;
}
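
Helper.NormalDist and Helper.SquareRoot are not shown in the snippet. Presumably NormalDist evaluates the Gaussian (normal) probability density of a feature value given the class's mean and standard deviation. A minimal sketch of what these helpers might look like (the bodies and signatures are an assumption, not the original code):

using System;

public static class Helper
{
    // Gaussian PDF: exp(-(x - mean)^2 / (2 * sigma^2)) / (sigma * sqrt(2 * pi))
    public static double NormalDist(double x, double mean, double stdDev)
    {
        double exponent = -((x - mean) * (x - mean)) / (2 * stdDev * stdDev);
        return Math.Exp(exponent) / (stdDev * Math.Sqrt(2 * Math.PI));
    }

    // Assumed to be a thin wrapper over Math.Sqrt.
    public static double SquareRoot(double value)
    {
        return Math.Sqrt(value);
    }
}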
I figured it out. 0.5 is the a priori (prior) probability. Naive Bayes scores each class C as P(C) multiplied by the product of the per-feature likelihoods P(x_k | C); this implementation evidently assumes a two-class problem with equally likely classes, so the prior is hard-coded as 1/2 for both.
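
If the classes were not equally likely, the prior could be estimated from the training data instead of being hard-coded. A minimal sketch, reusing the results list (class name plus row count) already built inside Classify; the variable names are hypothetical:

// Hypothetical replacement for the hard-coded 0.5: estimate the prior
// P(class i) as that class's share of the training rows.
int totalRows = results.Sum(r => r.Count);            // needs System.Linq
double prior = (double)results[i].Count / totalRows;  // class frequency
score.Add(results[i].Name, finalScore * prior);

In practice, implementations also often sum the logarithms of the prior and the per-feature likelihoods instead of multiplying raw densities, since the product can underflow when there are many features.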