Search code examples
Tags: c, types, symbols

Long division 1/N algorithm using string to store digits, creates symbols/letters when too long


I am trying to make a long division algorithm specifically for the case of 1/N. I have gotten the code to work correctly; however, the data types are doing some "funky business" once the string storing the digits reaches a length of roughly 20 characters or more.

You can change the b variable to test any other fraction.

Here is my code:

#include <stdio.h>
#include <string.h>
#include <stdlib.h>

/*
 * Computes the decimal digits of 1/b by long division, appending one digit
 * per iteration to a heap-grown string.
 * NOTE(review): this is the question's buggy version; the comments below
 * mark the defects that the answer's code fixes.
 */
int main()
{
    int a = 1;    /* running remainder (numerator of the division) */
    int b = 666;  /* divisor: change to test other 1/N fractions */
    int c = 0;    /* current quotient digit (always 0..9, since a < 10*b) */
    int d = 1;    /* latest remainder; the loop ends when it reaches 0 */
    int z = 0;    /* the constant "0" digit emitted while a*10 is still < b */

    /* BUG: points at a string literal; the free(decimals) at the end is
       undefined behavior if the loop never reassigns it to heap memory. */
    char *decimals = "0.";
    size_t len = 2;

    while (d>0)
    {
        printf("a: %d   ",a);
        printf("b: %d   ",b);
        printf("c: %d   ",c);
        printf("d: %d\n",d);
    
        if(a<b)
        {
            a = a*10;
        
            if(a<b)
            {
                len = len + 2;  /* buffer size only ever grows in THIS branch */
            
                /* BUG: calloc result is not checked, and the previous
                   `decimals` buffer is leaked on every append. */
                char *temp = calloc(len, sizeof(char));
                sprintf(temp, "%s%d",decimals,z);
                decimals = temp;
                printf("%s\n\n", decimals);
            }
            else
            {
                c = a/b;
                d = a-b*c;
                a = d;
            
                /* BUG: `len` is NOT increased in this branch, so once the
                   string fills the allocation, sprintf writes past the end
                   of `temp` — this heap overflow is what produces the '#'
                   and other garbage characters after ~20 digits.
                   The old `decimals` buffer is leaked here as well. */
                char *temp = calloc(len, sizeof(char));
                sprintf(temp, "%s%d",decimals,c);
                decimals = temp;
                printf("%s\n\n", decimals);
            }
        }
    }   
    free(decimals);
}

And the CMD Output:

(screenshot of the console output: after around 20 digits, a stray '#' character appears inside the decimal string)

What is causing the # character to appear? If run for longer many more random letter/symbols appear.


Solution

  • I think that the way memory is being allocated is the problem. You wind up writing past the amount of allocated memory in the else statement. The length is not being expanded when it should be.

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    
    /*
     * Long division of 1/b: emits one decimal digit per iteration and appends
     * it to a dynamically grown string.
     *
     * Improvements over the question's code:
     *  - the buffer is grown before EVERY append (the original only grew it in
     *    one branch, overflowing the heap in the other);
     *  - growth is geometric via realloc and each digit is appended in place,
     *    instead of re-copying the whole string every iteration (O(n^2));
     *  - old buffers are never leaked, and allocation failure is handled;
     *  - an iteration cap guarantees termination for repeating expansions.
     */
    int main(void)
    {
        int a = 1;    /* running remainder */
        int b = 666;  /* divisor -- change to test other 1/N fractions */
        int c = 0;    /* current quotient digit */
        int d = 1;    /* latest remainder; loop ends when it reaches 0 */
        int z = 0;    /* the "0" digit emitted while a*10 is still < b */
        int k = 0;    /* iteration cap so repeating decimals still terminate */
    
        size_t len = 2;   /* characters currently in `decimals`, excluding NUL */
        size_t cap = 16;  /* bytes currently allocated */
    
        char *decimals = malloc(cap);   /* no cast needed in C */
        if (!decimals)
        {
            fprintf(stderr, "alloc failed!");
            return 1;
        }
        memcpy(decimals, "0.", 3);
    
        while (d > 0 && k++ < 1000)
        {
            printf("a: %d   ", a);
            printf("b: %d   ", b);
            printf("c: %d   ", c);
            printf("d: %d\n", d);
    
            if (a < b)
            {
                a = a * 10;
    
                int digit;           /* next decimal digit, always 0..9 */
                if (a < b)
                {
                    digit = z;       /* dividend still too small: emit a 0 */
                }
                else
                {
                    c = a / b;       /* 1..9, because a < 10*b at this point */
                    d = a - b * c;   /* new remainder */
                    a = d;
                    digit = c;
                }
    
                /* Grow geometrically; realloc preserves the existing digits
                   and frees the old block itself, so nothing leaks. */
                if (len + 2 > cap)
                {
                    size_t newcap = cap * 2;
                    char *temp = realloc(decimals, newcap);
                    if (!temp)
                    {
                        fprintf(stderr, "alloc failed!");
                        break;   /* `decimals` is still valid; freed below */
                    }
                    decimals = temp;
                    cap = newcap;
                }
                decimals[len++] = (char)('0' + digit);
                decimals[len] = '\0';
                printf("%s\n\n", decimals);
            }
        }
    
        free(decimals);
        return 0;
    }