I know this question has been asked before, but the answer provided in "Logistic regression in Julia using Optim.jl" no longer works. My code looks like this:
# Logistic function, broadcast element-wise over scalars or arrays.
sigmoid(z) = @. 1 / (1 + exp(-z));
"""
    costfunction(θ, X, y) -> (J, grad)

Logistic-regression cost (mean negative log-likelihood) and its gradient.

# Arguments
- `θ`: parameter vector, one entry per column of `X`.
- `X`: design matrix whose first column is assumed to be all ones (intercept)
  — under that assumption `θ[1] * X[i, 1] == θ[1]`, so this generalizes the
  original hard-coded 3-feature predictor to any number of features.
- `y`: 0/1 label vector with `length(y) == size(X, 1)`.

# Returns
The scalar cost `J` and the gradient vector `grad` (same length as `θ`).
"""
function costfunction(θ, X, y)
    m = length(y)
    n = length(θ)
    # Logistic link, inlined so the block has no external dependencies.
    σ(z) = 1 / (1 + exp(-z))
    # Predicted probability for each sample, computed ONCE. The original
    # re-evaluated this inside the gradient loop for every (feature, sample)
    # pair, doing O(m·n) redundant predictor evaluations.
    h = [σ(sum(X[i, k] * θ[k] for k in 1:n)) for i in 1:m]
    # Cross-entropy cost. Start from 0.0 (not the Int 0) so the accumulator
    # is type-stable.
    J = 0.0
    for i in 1:m
        J += y[i] == 0 ? -log(1 - h[i]) : -log(h[i])
    end
    J /= m
    # grad = Xᵀ(h − y) / m
    grad = zeros(n)
    for k in 1:n, i in 1:m
        grad[k] += (h[i] - y[i]) * X[i, k]
    end
    grad ./= m
    return J, grad
end
# The original assigned the tuple (J, grad) EVALUATED at initial_theta to
# `cost, grad!` — a number and a vector, not callables — and the optimize
# call had a dangling comma where the initial point belongs. Optim needs
# single-argument closures over the fixed data instead:
cost(θ) = costfunction(θ, X, y)[1];
grad(θ) = costfunction(θ, X, y)[2];   # returns a fresh vector → inplace = false
res = optimize(cost, grad, initial_theta, ConjugateGradient(),
               Optim.Options(iterations = 1000); inplace = false);
initial_theta is [0, 0, 0].
X is a 99×3 DataFrame (the first column is all 1s), and y is a vector with 99 elements.
How do I find the θ that minimizes the function using Optim.jl?
# Single-argument closures over the fixed data (X, y), as Optim expects.
cost(θ) = costfunction(θ, X, y)[1];
# No `!` in the name: this RETURNS a new gradient vector rather than
# mutating one in place, which is exactly why `inplace = false` is passed
# below. (Note each call recomputes the cost too; Optim.only_fg! can share
# that work if performance matters.)
grad(θ) = costfunction(θ, X, y)[2];
res = optimize(cost, grad, initial_theta, LBFGS(); inplace = false);
# Parameter vector at the minimum found by the optimizer.
θ = Optim.minimizer(res);
The `inplace = false` keyword is documented in the Optim.jl manual: https://julianlsolvers.github.io/Optim.jl/stable/#user/minimization/