Tags: c#, unity-game-engine, z-order, octree, z-order-curve

How to navigate an octree using Morton code?


I am using an octree in my game, and I am using Morton codes (z-order) to navigate it.

Somehow it doesn't work correctly and I can't figure out why: nodes end up in the wrong positions (screenshots below).

I've already asked GPT and DeepSeek a million times, but they only give me misleading information.

Here are the crucial code fragments; please help:

  1. Morton support array initialization. MortonMapping converts a true local child index to a z-ordered local index and vice versa:
    static Utils()
    {
        thresholds[0] = Consts.Lod0Range;
        for (int i = 1; i <= Consts.MaxLod; i++)
            thresholds[i] = thresholds[i - 1] * 2f;
        for (int i = 0; i < 3; i++)
        {
            morton256[i] = new uint[256];
            for (uint j = 0; j < 256; j++)
                morton256[i][j] = SpreadBits(j, i);
        }
        for (int x = 0; x < 2; x++)
            for (int y = 0; y < 2; y++)
                for (int z = 0; z < 2; z++)
                    MortonMapping[x * 4 + y * 2 + z] = (int)(MortonEncode(new Vector3Int(x, y, z)) & 0b111);
    }

    private static uint SpreadBits(uint x, int offset)
    {
        uint result = 0;
        for (int i = 0; i < 8; i++)
        {
            result <<= 3;
            result |= ((x >> (7 - i)) & 1) << offset;
        }
        return result;
    }
  2. Morton encoding:
    public static ulong MortonEncode(Vector3Int vector)
    {
        if (vector.x < 0 || vector.y < 0 || vector.z < 0)
            throw new System.Exception("Negative morton");
        ulong answer =
        morton256[0][(vector.x >> 16) & 0xFF] |
        morton256[1][(vector.y >> 16) & 0xFF] |
        morton256[2][(vector.z >> 16) & 0xFF];
        answer = answer << 24 |
        morton256[0][(vector.x >> 8) & 0xFF] |
        morton256[1][(vector.y >> 8) & 0xFF] |
        morton256[2][(vector.z >> 8) & 0xFF];
        answer = answer << 24 |
        morton256[0][(vector.x) & 0xFF] |
        morton256[1][(vector.y) & 0xFF] |
        morton256[2][(vector.z) & 0xFF];
        return answer;
    }
  3. Get the local child index from a Morton code, for a specific octree level:
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static int MortonIndexForLevel(int level, ulong mortonIndex)
    {
        return (int)(mortonIndex >> (level * 3)) & 0b111;
    }
  4. Get the local child position from a flat child index (relative to the parent). This is used in drawing, which I am not including, as I am sure the problem is in the insertion mechanism:
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static Vector3Int FlatIndexToVector(int realIdx)
    {
        return new Vector3Int((realIdx & 4) >> 2, (realIdx & 2) >> 1, realIdx & 1);
    }
  5. Recursive insertion:
    //coords are relative to world 0,0,0, measured in units of the smallest possible node size
    public void Insert(byte lod, Vector3Int coords, NativeArray<ushort> data)
    {
        EnsureSpace(coords);
        Chunk chunk = !data.IsCreated ? null : new Chunk(data);
        ulong idx = Utils.MortonEncode(coords - beginCorner); //Subtract the octree's beginCorner to localize the coords

        //Debug section, no logic here
        Vector3 center = FromGlobalCoords(coords) + Vector3.one * (1 << lod) * lod0ChunkSize / 2;
        Debug.DrawLine(lastPos, center, Color.white, 4000000, false);
        lastPos = center;
        Debug.Log("Insert " + _root.Lod + ", " + lod + ", " + idx + " at: " + coords + ", " + (coords - beginCorner) + ", " + FromGlobalCoords(coords) + ", " + beginCorner + ", " + (chunk == null ? "empty" : "ok"));
        Debug.Log(Utils.ToMortonString(idx));

        InsertRecursive(_root, new Node(lod, chunk), idx, beginCorner, FromGlobalCoords(beginCorner));
    }

    private void InsertRecursive(Node parent, Node value, ulong idx, /*debug param*/ Vector3Int coords, /*debug param*/ Vector3 lastCenter)
    {
        byte lod = (byte)(parent.Lod - 1);
        int curIdx = Utils.MortonIndexForLevel(lod, idx);

        //Debug section, no logic here
        Vector3Int localCoords = Utils.FlatIndexToVector(Utils.MortonMapping[curIdx]);
        coords += localCoords * (1 << lod);
        Vector3 center = FromGlobalCoords(coords) + Vector3.one * (1 << lod) * lod0ChunkSize / 2;
        if (idx == 12582911)
        {
            Debug.Log("Go down: " + parent.Lod + ", " + localCoords + ", " + coords + ", " + center + ", " + FromGlobalCoords(coords));
            Debug.DrawLine(lastCenter, center, Color.Lerp(Color.green, Color.blue, lod / 10f), 4000000, false);
        }

        if (lod == value.Lod)
        {
            parent[curIdx] = value;
            return;
        }
        if (parent[curIdx] == null)
            parent[curIdx] = new Node(lod);
        InsertRecursive(parent[curIdx], value, idx, coords, center);
    }
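
To sanity-check these helpers together, here is a minimal round-trip sketch (hypothetical values, not part of my game code): encode a coordinate, peel the per-level child indices back off, and rebuild the coordinate.

    // Hypothetical sanity check: MortonEncode -> per-level indices -> coordinate.
    Vector3Int p = new Vector3Int(5, 3, 1);
    ulong code = Utils.MortonEncode(p);
    Vector3Int rebuilt = Vector3Int.zero;
    for (int level = 2; level >= 0; level--) // 3 levels cover coords < 8
    {
        int mortonIdx = Utils.MortonIndexForLevel(level, code);
        Vector3Int local = Utils.FlatIndexToVector(Utils.MortonMapping[mortonIdx]);
        rebuilt += local * (1 << level);
    }
    Debug.Log(rebuilt); // should print (5, 3, 1) if the helpers are consistent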

Here are some debug images, maybe they will help (the white lines connect the true positions in incoming order; it is z-order because another octree on the server is sending this data):

(three debug screenshots)

Edit 1: I just tested this LUT implementation of Morton encoding, but nothing changed: https://www.forceflow.be/2013/10/07/morton-encodingdecoding-through-bit-interleaving-implementations/
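
For cross-checking, a plain-loop encoder can be compared against the table-based one. This is just a sketch; it assumes the bit layout the SpreadBits offsets produce above (x in bit 0 of each triple, y in bit 1, z in bit 2) and 21 bits per axis:

    // Hypothetical reference encoder for cross-checking the LUT version (slow but obvious).
    public static ulong MortonEncodeSlow(Vector3Int v)
    {
        ulong code = 0;
        for (int i = 0; i < 21; i++) // 21 bits per axis fit into 63 bits
        {
            code |= (ulong)((v.x >> i) & 1) << (3 * i);     // x -> bit 0 of each triple
            code |= (ulong)((v.y >> i) & 1) << (3 * i + 1); // y -> bit 1
            code |= (ulong)((v.z >> i) & 1) << (3 * i + 2); // z -> bit 2
        }
        return code;
    }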

Edit 2: I've changed the insertion method to a different technique (without Morton codes), but the problem still exists:

    public void Insert(byte lod, Vector3Int coords, NativeArray<ushort> data)
    {
        EnsureSpace(coords);
        Chunk chunk = !data.IsCreated ? null : new Chunk(data);
        ulong idx = Utils.MortonEncode(coords - beginCorner); //Subtract the octree's beginCorner to localize the coords

        //Debug section, no logic here
        Vector3 center = FromGlobalCoords(coords) + Vector3.one * (1 << lod) * lod0ChunkSize / 2;
        Debug.DrawLine(lastPos, center, Color.white, 4000000, false);
        lastPos = center;
        Debug.Log("Insert " + _root.Lod + ", " + lod + ", " + idx + " at: " + coords + ", " + (coords - beginCorner) + ", " + FromGlobalCoords(coords) + ", " + beginCorner + ", " + (chunk == null ? "empty" : "ok"));
        Debug.Log(Utils.ToMortonString(idx));

        InsertRecursive(_root, new Node(lod, chunk), idx, Vector3Int.zero, coords - beginCorner);
    }

    private void InsertRecursive(Node parent, Node value, ulong idx, Vector3Int parentCorner, Vector3Int coords)
    {
        byte lod = (byte)(parent.Lod - 1);
        int curIdx = Utils.MortonIndexForLevel(lod, idx);

        Vector3Int parentCenter = parentCorner + Vector3Int.one * (1 << lod);
        int index = 0;
        if (coords.x >= parentCenter.x)
            index |= 4;
        if (coords.y >= parentCenter.y)
            index |= 2;
        if (coords.z >= parentCenter.z)
            index |= 1;

        // If both computations agree on the child, the two logged values are
        // 3-bit reversals of each other (index is flat layout, x -> bit 2;
        // curIdx is Morton layout, x -> bit 0)
        Debug.Log("Indexes: " + index + ", " + curIdx + "\n" + Convert.ToString(index, 2).PadLeft(3, '0') + "\n" + Convert.ToString(curIdx, 2).PadLeft(3, '0'));

        if (lod == value.Lod)
        {
            parent[index] = value;
            return;
        }
        if (parent[index] == null)
            parent[index] = new Node(lod);
        InsertRecursive(parent[index], value, idx, parentCorner + Utils.FlatIndexToVector(index) * (1 << lod), coords);
    }

Solution

  • I've found the problem and fixed it. It will be hard to explain, but I will try:

    1. The problem was a side effect of localizing the coords (coords - beginCorner).
    2. It wouldn't be a problem if I always put data into the deepest nodes, but I put data at different levels of the octree.
    3. TL;DR: unlocalized/global coords are aligned to the target node size, so their binary representation has 0s below the target bit position. E.g., say we want to put data on level 6: the coords look like "XX000000", but after localizing against an unaligned corner they look like "XXXXXXXX" (see the sketch after this list).
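
    A 1D sketch of the failure with made-up numbers (one axis for clarity): the tree can only address multiples of the node size relative to its corner, so the descent effectively floors the localized coordinate.

        // Illustration only: inserting a node at level 3 (node size 2^3 = 8).
        int level = 3;
        int coords = 16;    // globally aligned to 8 (binary 10000)
        int badCorner = 3;  // NOT aligned to 8
        int goodCorner = 8; // aligned to 8

        // Effective position the descent assigns: corner + floor((coords - corner) / 8) * 8
        int good = goodCorner + (((coords - goodCorner) >> level) << level); // 16 - correct
        int bad  = badCorner  + (((coords - badCorner)  >> level) << level); // 11 - off by (16 - 3) % 8 = 5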

    How did I fix it? I make sure that the beginCorner of the octree is aligned to the grid of the largest children (the grow mechanism needed modification).

    Here is the code before the fix:

            Node newRoot = new Node((byte)(_root.Lod + 1));
            int offset = 1 << _root.Lod;
            int index = 0;
            if (direction.x < 0)
            {
                index |= 4;
                beginCorner.x -= offset;
            }
            if (direction.y < 0)
            {
                index |= 2;
                beginCorner.y -= offset;
            }
            if (direction.z < 0)
            {
                index |= 1;
                beginCorner.z -= offset;
            }
            index = Utils.MortonMapping[index];
            newRoot[index] = _root;
            _root = newRoot;
    

    And here is the code after the fix:

                Node newRoot = new Node((byte)(_root.Lod + 1));
                int offset = 1 << _root.Lod;
                int index = 0;
                if (growDirection.x < 0)
                {
                    index |= 4;
                    _corner.x = Mathf.FloorToInt((float)(_corner.x - offset) / offset) * offset;
                }
                if (growDirection.y < 0)
                {
                    index |= 2;
                    _corner.y = Mathf.FloorToInt((float)(_corner.y - offset) / offset) * offset;
                }
                if (growDirection.z < 0)
                {
                    index |= 1;
                    _corner.z = Mathf.FloorToInt((float)(_corner.z - offset) / offset) * offset;
                }
                index = Utils.MortonMapping[index];
                newRoot[index] = _root;
                _root = newRoot;
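
    A note on the rounding: Mathf.FloorToInt((float)(_corner.x - offset) / offset) * offset snaps the corner down to a multiple of offset. If float precision ever becomes a concern for large coordinates, an all-integer equivalent (hypothetical helper, not in my code) would be:

        // Integer floor-alignment; matches the float version for offset > 0.
        static int AlignDown(int value, int offset)
        {
            int r = value % offset; // C# % keeps the sign of the dividend
            return r < 0 ? value - r - offset : value - r;
        }
        // usage: _corner.x = AlignDown(_corner.x - offset, offset);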
    

    This problem is crazy hard to understand, so that is probably why GPT and DeepSeek couldn't find it. "coords - beginCorner" is correct, but who would notice that it is messing with the bits XD