I have to compute e^x using a Taylor series and an error bound. We were instructed to write three separate functions, then use them to compute e^x for a value x and an accuracy delta that the user inputs. The code finally compiles, but if I enter x=2 and any value for delta, I get sum=3.000 instead of 7.3890. I'm assuming the error is in my final loop (the sum loop). Any help is appreciated.
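(For context: the series is the Maclaurin expansion of e^x, and I assume the error bound meant here is the standard remainder estimate after truncating at the N-th term:)

$$e^x = \sum_{n=0}^{\infty} \frac{x^n}{n!}, \qquad \left| e^x - \sum_{n=0}^{N} \frac{x^n}{n!} \right| \le \frac{e^{|x|}\,|x|^{N+1}}{(N+1)!}$$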
Code:
#include <stdio.h>
#include <math.h>

float power(float A, int B)
{
    float sum = 1.0;
    int nterms = 1;
    while (nterms <= B && B > 0)
    {
        sum = A * sum;
        nterms++;
    }
    return sum;
}

int factorial(int b)
{
    int fact = 1;
    while (b >= 2)
    {
        fact = b * (b - 1) * fact;
        b = b - 2;
    }
    return fact;
}

int Terms(float X, float a)
{
    int N = 1, l;
    float L, R;
    l = N + 1;
    while (L < a && a <= R)
    {
        L = (power(X, l) / (factorial(l)));
        R = (power(X, N) / (factorial(N)));
        N++;
    }
    return N;
}

int main()
{
    float x, delta, term = 0.0, sum = 0.0;
    int n, Nterms;
    printf("Please enter a decimal number. x=");
    scanf("%f",x);
    printf("Please enter another number. delta=");
    scanf("%d",delta);
    Nterms = Terms(x, delta);
    for (n = 0; n < Nterms; n++)
    {
        if (n == 0 || n == 1)
        {
            sum = 1 + x;
        }
        else if (n >= 2 && n < Nterms)
        {
            sum = sum + term;
            term = (power(x, n)) / (factorial(n));
        }
    }
    printf("The approximation for e^(%f)=%f", x, sum);
    return 0;
}
Change
scanf("%f",x);
scanf("%d",delta);
to
scanf("%f", &x);
scanf("%f", &delta);
The error was in how you read the user input: scanf() stores what it reads into the location its pointer argument points to, so you must pass the address of each variable with &. Note also that delta is a float, so it needs the %f conversion specifier rather than %d.
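For anyone unsure what that change does, here is a minimal standalone sketch (not the assignment code itself) showing scanf() writing through the addresses it is given; the return-value checks are just an extra safeguard:

#include <stdio.h>

int main(void)
{
    float x = 0.0f, delta = 0.0f;

    /* scanf needs the ADDRESS of each variable (&x, &delta) so it can
       store the converted value there; both are floats, so both use %f. */
    printf("Please enter a decimal number. x=");
    if (scanf("%f", &x) != 1)
        return 1;

    printf("Please enter another number. delta=");
    if (scanf("%f", &delta) != 1)
        return 1;

    printf("x=%f delta=%f\n", x, delta);
    return 0;
}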