
W5100 is sending garbage


I am trying to implement a web interface with a W5100 Ethernet controller and an XMega, but my browser prints out this weird result:

(screenshot: the browser displays garbled output instead of the page)

Please take a look at my code:

SPIM_Config_t Config_SPIM = {
    .Device = &SPIC,
    .Mode = SPI_MODE_0,
    .Prescaler = SPI_PRESCALER_64,
};

W5100_Config_t Config_Ethernet = {
    .Submask = {255, 255, 0, 0},
    .IP = {169, 254, 133, 121},
    .Gateway = {169, 154, 133, 129},
    .MAC = {0x00, 0x00, 0x00, 0x00, 0x00, 0xAA}
};

uint8_t Rx_Buffer[2048];
uint8_t Tx_Buffer[2048];

const char HTTP[]  =    "HTTP/1.0 200 OK\r\nContent-Type: text/html\r\nPragma: no-cache\r\n\r\n"
                        "<html>\r\n"
                            "<body>\r\n"
                                "<title>Title</title>\r\n"
                                "<p>Hello world</p>\r\n"
                            "</body>\r\n"
                        "</html>\r\n";

int main(void)
{
    W5100_Init(&Config_SPIM, &Config_Ethernet);

    while(1) 
    {
        W5100_Status_t Status;
        W5100_GetState(0, &Status);

        switch(Status)
        {
            case W5100_SOCK_CLOSED:
            {
                if(W5100_Open(0, W5100_PROT_TCP, 80, W5100_MEM_2K, W5100_MEM_2K, 65535) == W5100_NO_ERROR)
                {
                    W5100_Listen(0, ETHERNET_TIMEOUT);
                }

                break;
            }
            case W5100_SOCK_ESTABLISHED:
            {
                uint16_t Rx_Bytes;

                if(W5100_GetBytes(0, &Rx_Bytes) == W5100_NO_ERROR)
                {
                    if(Rx_Bytes)
                    {
                        W5100_Receive(0, Rx_Buffer, Rx_Bytes);

                        strcpy((char*)Tx_Buffer, HTTP);
                        W5100_Send(0, Tx_Buffer, strlen((char*)HTTP), ETHERNET_TIMEOUT);
                    }
                    else
                    {
                        
                    }
                }

                W5100_Disconnect(0, ETHERNET_TIMEOUT);

                break;
            }
            case W5100_SOCK_FIN_WAIT:
            case W5100_SOCK_CLOSING:
            case W5100_SOCK_TIME_WAIT:
            case W5100_SOCK_CLOSE_WAIT:
            case W5100_SOCK_LAST_ACK:
            {
                W5100_Close(0, ETHERNET_TIMEOUT);

                break;
            }
        }
    }
}

I think the error is somewhere in my W5100_Send function. It seems that the controller is sending the contents of different memory locations, but I can't figure out the error. The code is based on the datasheet of the Ethernet controller:

W5100_ErrorCode_t W5100_Send(uint8_t Socket, uint8_t* Buffer, uint16_t Length, uint32_t Timeout)
{
    uint8_t Temp[2];
    uint8_t Mask;
    uint16_t SocketBase;
    uint16_t Offset;
    uint16_t Free;
    uint16_t SocketMemory;
    uint32_t Timeout_Temp = Timeout;

    if(!_W5100_IsInitialized)
    {
        return W5100_NOT_INITIALIZED;
    }
    else if((Socket > 0x04) || (Buffer == NULL) || (Length == 0x00))
    {
        return W5100_INVALID_PARAM;
    }

    // Get the memory mask for address calculation
    W5100_ReadRegister(W5100_REGISTER_TMSR, &Mask);
    Mask &= (0x03 << (Socket << 0x01));

    // Check for invalid memory by comparing the memory mask for the given socket and the socket index
    if(((Socket > 0) && (Mask == 3)) || ((Socket > 1) && (Mask == 2)))
    {
        return W5100_INVALID_PARAM;
    }

    SocketBase = W5100_SOCKET_ADDR(Socket);
    SocketMemory = W5100_SOCKET_MEM_OFFSET << Mask;

    // Wait while the buffer is full
    do
    {
        // Get the free bytes
        W5100_ReadRegister(SocketBase + W5100_OFFSET_TX_FSR0, &Temp[0]);
        W5100_ReadRegister(SocketBase + W5100_OFFSET_TX_FSR1, &Temp[1]);
        Free = ((uint16_t)(Temp[0] << 0x08)) | Temp[1];

        if(Timeout_Temp-- == 0x00)
        {
            W5100_Disconnect(Socket, Timeout);

            return W5100_TIMEOUT;
        }

        _delay_ms(1);
    }while(Free < Length);

    // Get the write pointer address
    W5100_ReadRegister(SocketBase + W5100_OFFSET_TX_WR0, &Temp[0]);
    W5100_ReadRegister(SocketBase + W5100_OFFSET_TX_WR1, &Temp[1]);
    Offset = (((uint16_t)(Temp[0] << 0x08)) | Temp[1]) & W5100_TX_MEM_MASK;

    // Check for an overflow
    if(Offset + Length > SocketMemory)
    {
        uint16_t Upper;
        uint16_t Left;

        Upper = SocketMemory - Offset;
        Left = Length - Upper;

        W5100_WriteMemory(W5100_TX_BUFFER_BASE + (SocketMemory * Socket) + Offset, Buffer, Upper);
        W5100_WriteMemory(W5100_TX_BUFFER_BASE + (SocketMemory * Socket), Buffer, Left);
    }
    else
    {
        W5100_WriteMemory(W5100_TX_BUFFER_BASE + (SocketMemory * Socket) + Offset, Buffer, Length);
    }

    W5100_WriteRegister(SocketBase + W5100_OFFSET_TX_WR0, Offset >> 0x08);
    W5100_WriteRegister(SocketBase + W5100_OFFSET_TX_WR1, Offset & 0xFF);

    return W5100_ExecuteCommand(Socket, W5100_CMD_SEND, Timeout);
}

Solution

  • You should rewrite your W5100_Send completely, because it is full of issues.

    For example, the calculation of the Mask value makes no sense: the two TMSR bits for the socket are masked in place but never shifted down, so the later comparisons and the W5100_SOCKET_MEM_OFFSET << Mask shift operate on the wrong value.
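
    As a sketch (keeping the names from your function), the extraction should look more like this, so that Mask ends up in the range 0..3:

        W5100_ReadRegister(W5100_REGISTER_TMSR, &Mask);
        Mask = (Mask >> (Socket << 0x01)) & 0x03;   // Two size bits for this socket, shifted down to bits 1..0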

    The loop that waits for the Free value always delays at least 1 ms, even when a good value is read on the very first pass. Also, when the timeout expires it gives up and returns, even if the Free value it has just read is good.
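
    A sketch of a loop that checks the free space first and only delays when it really has to wait (using the same names as your function):

        // Check the free TX space first; only delay and count down the timeout while waiting
        for(;;)
        {
            W5100_ReadRegister(SocketBase + W5100_OFFSET_TX_FSR0, &Temp[0]);
            W5100_ReadRegister(SocketBase + W5100_OFFSET_TX_FSR1, &Temp[1]);
            Free = ((uint16_t)(Temp[0] << 0x08)) | Temp[1];

            if(Free >= Length)
            {
                break;                              // Enough space - go on with the write
            }

            if(Timeout_Temp-- == 0x00)
            {
                W5100_Disconnect(Socket, Timeout);  // Give up only after the requested timeout
                return W5100_TIMEOUT;
            }

            _delay_ms(1);
        }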

    The Offset value is damaged by the & operation:

    Offset = (((uint16_t)(Temp[0] << 0x08)) | Temp[1]) & W5100_TX_MEM_MASK;

    This value is never increased by the number of bytes written, and the damaged value is written back to W5100_OFFSET_TX_WR1:W5100_OFFSET_TX_WR0.

    The wrap-around write has an error:

            W5100_WriteMemory(W5100_TX_BUFFER_BASE + (SocketMemory * Socket) + Offset, Buffer, Upper);
            W5100_WriteMemory(W5100_TX_BUFFER_BASE + (SocketMemory * Socket), Buffer, Left);
    

    You're copying both parts from the start of Buffer. In the second call it should be &Buffer[Upper].

    Etc etc...

    First you need to determine the socket buffer sizes. I encourage you to set up the socket sizes once, at initialization, and thus avoid the offset and size calculations at runtime.
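
    For example, if your init code does not already do this, write the TX memory size register once at start-up and treat the sizes as constants afterwards. A sketch (0x55 gives every socket 2 KB, which is also the chip's reset default):

        // Give every socket 2 KB of TX memory: TMSR = 0b01010101
        W5100_WriteRegister(W5100_REGISTER_TMSR, 0x55);

        // With fixed 2 KB buffers the size and base address become simple constants
        uint16_t SocketMemory  = 2048;
        uint16_t SocketBufAddr = W5100_TX_BUFFER_BASE + (SocketMemory * Socket);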

    But if you want to determine the socket size dynamically, then you can do it as follows:

        uint16_t SocketBufAddr = W5100_TX_BUFFER_BASE; // Start of the socket memory block
        SocketMemory = 0; // Size of the socket memory block
        W5100_ReadRegister(W5100_REGISTER_TMSR, &Mask);
        for (uint8_t i = 0 ; i <= Socket ; i++) {
            SocketBufAddr += SocketMemory; // Increase the offset by the previous socket size
            SocketMemory = 1024 << ((Mask >> (i * 2)) & 3);
        }
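
    Each two-bit field in TMSR selects 1, 2, 4 or 8 KB for the corresponding socket, which is what the 1024 << bits expression computes, and a socket's buffer starts at W5100_TX_BUFFER_BASE plus the sum of the sizes of all sockets before it, which is what the loop accumulates in SocketBufAddr.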
    

    Now, the writing process should look something like this:

        // Get the write pointer address
        W5100_ReadRegister(SocketBase + W5100_OFFSET_TX_WR0, &Temp[0]);
        W5100_ReadRegister(SocketBase + W5100_OFFSET_TX_WR1, &Temp[1]);
        uint16_t WrPointer = (((uint16_t)(Temp[0] << 0x08)) | Temp[1]); // no & operation! It is the 16-bit pointer!!!
    
        
        Offset = WrPointer & (SocketMemory - 1); // Offset inside the socket memory block. SocketMemory is always = 2^n
    
        // Check for an overflow
        if(Offset + Length > SocketMemory)
        {
            uint16_t Upper;
            uint16_t Left;
    
            Upper = SocketMemory - Offset;
            Left = Length - Upper;
    
            W5100_WriteMemory(SocketBufAddr + Offset, Buffer, Upper);
            W5100_WriteMemory(SocketBufAddr, &Buffer[Upper], Left);
        }
        else
        {
            W5100_WriteMemory(SocketBufAddr + Offset, Buffer, Length);
        }
    
        WrPointer += Length; // Increase full 16-bit pointer value
    
        // Write the new pointer back
        W5100_WriteRegister(SocketBase + W5100_OFFSET_TX_WR0, WrPointer >> 0x08);
        W5100_WriteRegister(SocketBase + W5100_OFFSET_TX_WR1, WrPointer & 0xFF);
    
        return W5100_ExecuteCommand(Socket, W5100_CMD_SEND, Timeout);
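
    Depending on how W5100_ExecuteCommand is implemented, you may also want to wait for the SEND_OK flag in the socket interrupt register after the SEND command before queuing the next block of data, otherwise back-to-back sends can overwrite data that has not left the chip yet.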