python, tensorflow, tensorflow-lite, edge-tpu

"IndexError: index 10 is out of bounds for axis 0 with size 10" when running inference on Coral Dev Board


I'm trying to run a quantized, Edge-TPU-compiled TensorFlow object detection model on a Coral Dev Board.

My Code:

import time
import os

from PIL import Image
from PIL import ImageDraw

from pycoral.adapters import common
from pycoral.adapters import detect
from pycoral.utils.dataset import read_label_file
from pycoral.utils.edgetpu import make_interpreter

model_path = '/mnt/ssd1/mobilenet_v2_1.0_224_quant_edgetpu.tflite'
label_path = '/mnt/ssd1/meteor-labels.txt'
img_directory = "/mnt/ssd1/test_images/"
img_filenames = os.listdir(img_directory)
count = 5
threshold = 0.2
output_path = "/mnt/ssd1/detection_output/"

labels = read_label_file(label_path) if label_path else {}
interpreter = make_interpreter(model_path)
interpreter.allocate_tensors()


def draw_objects(draw, objs, label_data):
    """Draws the bounding box and label for each object."""
    for obj in objs:
        bbox = obj.bbox
        draw.rectangle([(bbox.xmin, bbox.ymin), (bbox.xmax, bbox.ymax)],
                       outline='red')
        draw.text((bbox.xmin + 10, bbox.ymin + 10),
                  '%s\n%.2f' % (label_data.get(obj.id, obj.id), obj.score),
                  fill='red')


def run_inference(image, index):
    _, scale = common.set_resized_input(
        interpreter, image.size, lambda size: image.resize(size, Image.ANTIALIAS))

    print('----INFERENCE TIME----')
    print('Note: The first inference is slow because it includes',
          'loading the model into Edge TPU memory.')
    for _ in range(count):
        start = time.perf_counter()
        interpreter.invoke()
        inference_time = time.perf_counter() - start
        objs = detect.get_objects(interpreter, threshold, scale)
        print('%.2f ms' % (inference_time * 1000))

    print('-------RESULTS--------')
    if not objs:
        print('No objects detected')

    for obj in objs:
        print(labels.get(obj.id, obj.id))
        print('  id:    ', obj.id)
        print('  score: ', obj.score)
        print('  bbox:  ', obj.bbox)

    if output_path:
        image = image.convert('RGB')
        draw_objects(ImageDraw.Draw(image), objs, labels)
        image.save(os.path.join(output_path, f"{index}.jpg"))
        # image.show()


for i, path in enumerate(img_filenames):
    run_inference(Image.open(os.path.join(img_directory, path)).convert('RGB'), i)

When I run it via "mdt shell", it throws the following error:

----INFERENCE TIME----
Note: The first inference is slow because it includes loading the model into Edge TPU memory.
Traceback (most recent call last):
  File "detect_devboard.py", line 86, in <module>
    run_inference(Image.open(os.path.join(img_directory, path)).convert('RGB'), i)
  File "detect_devboard.py", line 65, in run_inference
    objs = detect.get_objects(interpreter, threshold, scale)
  File "/usr/lib/python3/dist-packages/pycoral/adapters/detect.py", line 237, in get_objects
    return [make(i) for i in range(count) if scores[i] >= score_threshold]
  File "/usr/lib/python3/dist-packages/pycoral/adapters/detect.py", line 237, in <listcomp>
    return [make(i) for i in range(count) if scores[i] >= score_threshold]
IndexError: index 10 is out of bounds for axis 0 with size 10

The Dev Board runs Mendel Linux and has Python 3.7.3 and pycoral 2.0.0 installed.

What can I do to run the inference successfully?


Solution

  • It seems to be a bug in the PyCoral API: get_objects() iterates over the detection count reported by the model, which can exceed the length of the scores tensor. To solve the issue, I replaced the last line of the "detect.py" file (in my case located at "/usr/lib/python3/dist-packages/pycoral/adapters/detect.py") with this updated line:

    return [make(i) for i in range(len(scores)) if scores[i] >= score_threshold]
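
  • Alternatively, if you prefer not to patch a system-installed file, you can apply the same clamp in your own script by reading the raw output tensors through the standard TensorFlow Lite interpreter API instead of calling detect.get_objects(). The sketch below is only an illustration under assumptions: it assumes the common SSD output ordering (boxes, class IDs, scores, count), so check interpreter.get_output_details() for your model first, and the helper name get_clamped_detections is made up for this example.

    def get_clamped_detections(interpreter, score_threshold):
        """Read SSD-style detection outputs and clamp the reported count
        to the length of the scores tensor, avoiding the IndexError."""
        out = interpreter.get_output_details()
        boxes = interpreter.get_tensor(out[0]['index'])[0]      # [N, 4] ymin, xmin, ymax, xmax (normalized)
        class_ids = interpreter.get_tensor(out[1]['index'])[0]  # [N]
        scores = interpreter.get_tensor(out[2]['index'])[0]     # [N]
        count = int(interpreter.get_tensor(out[3]['index'])[0])

        # The count tensor can report more detections than the scores
        # tensor actually holds; clamping it mirrors the one-line patch
        # to detect.py above.
        count = min(count, len(scores))

        return [{'id': int(class_ids[i]),
                 'score': float(scores[i]),
                 'bbox': boxes[i]}
                for i in range(count) if scores[i] >= score_threshold]

    You could call get_clamped_detections(interpreter, threshold) right after interpreter.invoke() in run_inference(). Note that the returned boxes are in normalized coordinates, so they still need to be mapped back to the original image size, which detect.get_objects() normally handles via its scale argument.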