I'm trying to implement a custom model in my image labeling app. Here is my code:
import 'dart:io' as io;
import 'dart:io';
import 'package:google_mlkit_image_labeling/google_mlkit_image_labeling.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:image_picker/image_picker.dart';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';
void main() {
  runApp(const MyApp());
}

class MyApp extends StatelessWidget {
  const MyApp({super.key});

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      debugShowCheckedModeBanner: false,
      title: 'Flutter Demo',
      home: const MyHomePage(),
    );
  }
}

class MyHomePage extends StatefulWidget {
  const MyHomePage({super.key});

  @override
  State<MyHomePage> createState() => _MyHomePageState();
}

class _MyHomePageState extends State<MyHomePage> {
  String imageLabel = "";
  late ImageLabeler imageLabeler;

  // Run the labeler on the picked image and build a "label : confidence%" string.
  void getImageLabels(XFile image) async {
    final inputImage = InputImage.fromFilePath(image.path);
    List<ImageLabel> labels = await imageLabeler.processImage(inputImage);
    StringBuffer sb = StringBuffer();
    for (ImageLabel imgLabel in labels) {
      String lblText = imgLabel.label;
      double confidence = imgLabel.confidence;
      sb.write(lblText);
      sb.write(' : ');
      sb.write((confidence * 100).toStringAsFixed(2));
      sb.write('%');
    }
    imageLabeler.close();
    imageLabel = sb.toString();
    setState(() {});
  }

  // Resolve a file path for the bundled model (the asset is copied out of the
  // bundle on platforms other than Android).
  Future<String> _getModel(String assetPath) async {
    if (io.Platform.isAndroid) {
      return 'flutter_assets/$assetPath';
    }
    final path = '${(await getApplicationSupportDirectory()).path}/$assetPath';
    await io.Directory(dirname(path)).create(recursive: true);
    final file = io.File(path);
    if (!await file.exists()) {
      final byteData = await rootBundle.load(assetPath);
      await file.writeAsBytes(byteData.buffer
          .asUint8List(byteData.offsetInBytes, byteData.lengthInBytes));
    }
    return file.path;
  }

  // Create the labeler from the custom TFLite model shipped in assets.
  void _initializeLabeler() async {
    const path = 'assets/ml/kiri-v1.tflite';
    final modelPath = await _getModel(path);
    final options = LocalLabelerOptions(modelPath: modelPath);
    imageLabeler = ImageLabeler(options: options);
  }

  @override
  void initState() {
    super.initState();
    _initializeLabeler();
  }

  @override
  void dispose() {
    imageLabeler.close();
    super.dispose();
  }

  File? image;

  // Pick an image from the camera or gallery, show it, and label it.
  Future pickImage(ImageSource source) async {
    try {
      final image = await ImagePicker().pickImage(source: source);
      if (image == null) return;
      final imageTemporary = File(image.path);
      setState(() {
        this.image = imageTemporary;
      });
      getImageLabels(image);
    } on PlatformException catch (e) {
      print('Failed to pick an image: $e');
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Center(
          child: Text('Kiri demo'),
        ),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            Spacer(),
            image != null
                ? Image.file(
                    image!,
                    width: 200,
                    height: 200,
                    fit: BoxFit.cover,
                  )
                : FlutterLogo(
                    size: 200,
                  ),
            ElevatedButton.icon(
              onPressed: () => pickImage(ImageSource.camera),
              icon: Icon(Icons.camera),
              label: Text('Camera'),
            ),
            ElevatedButton.icon(
              onPressed: () => pickImage(ImageSource.gallery),
              icon: Icon(Icons.add_photo_alternate_outlined),
              label: Text('Gallery'),
            ),
            Spacer(),
            Text(
              imageLabel,
            )
          ],
        ),
      ),
    );
  }
}
I get this exception:

E/flutter ( 8933): [ERROR:flutter/runtime/dart_vm_initializer.cc(41)] Unhandled Exception: PlatformException(ImageLabelDetectorError, com.google.mlkit.common.MlKitException: Failed to initialize detector. Input tensor has type kTfLiteFloat32: it requires specifying NormalizationOptions metadata to preprocess input images., null, null)
Here is the console:
I/tflite ( 8933): Replacing 71 node(s) with delegate (TfLiteXNNPackDelegate) node, yielding 1 partitions.
I/SurfaceView( 8933): surfaceChanged (720,1510) 1 #1 io.flutter.embedding.android.FlutterSurfaceView{39f184f V.E...... ......ID 0,0-720,1510}
I/ViewRootImpl@f714781MainActivity: updateBoundsLayer: shouldReparent = true t = android.view.SurfaceControl$Transaction@be55036 sc = Surface(name=Bounds for - com.example.imagelabel/com.example.imagelabel.MainActivity@2)/@0x2a1b5ac frame = 1
I/SurfaceView( 8933): applySurfaceTransforms: t = android.view.SurfaceControl$Transaction@4ba94e5 surfaceControl = Surface(name=SurfaceView - com.example.imagelabel/com.example.imagelabel.MainActivity@39f184f@2)/@0x33cf275 frame = 1
E/native ( 8933): E0421 14:51:46.114804 16014 calculator_graph.cc:809] NOT_FOUND: CalculatorGraph::Run() failed in Run:
E/native ( 8933): Calculator::Open() for node "ClassifierClientCalculator" failed: #vk Input tensor has type kTfLiteFloat32: it requires specifying NormalizationOptions metadata to preprocess input images. [type.googleapis.com/mediapipe.StatusList='\n\xed\x01\x08\x05\x12\xbb\x01\x43\x61lculator::Open() for node "ClassifierClientCalculator" failed: #vk Input tensor has type kTfLiteFloat32: it requires specifying NormalizationOptions metadata to preprocess input images.\x1a+\n$tflite::support::TfLiteSupportStatus\x12\x03\x32\x30\x38']
E/native ( 8933): === Source Location Trace: ===
E/native ( 8933): third_party/mediapipe/framework/tool/status_util.cc:139
E/native ( 8933):
E/native ( 8933): E0421 14:51:46.115117 16014 pipeline_jni.cc:96] NOT_FOUND: CalculatorGraph::Run() failed in Run:
E/native ( 8933): Calculator::Open() for node "ClassifierClientCalculator" failed: #vk Input tensor has type kTfLiteFloat32: it requires specifying NormalizationOptions metadata to preprocess input images. [type.googleapis.com/mediapipe.StatusList='\n\xed\x01\x08\x05\x12\xbb\x01\x43\x61lculator::Open() for node "ClassifierClientCalculator" failed: #vk Input tensor has type kTfLiteFloat32: it requires specifying NormalizationOptions metadata to preprocess input images.\x1a+\n$tflite::support::TfLiteSupportStatus\x12\x03\x32\x30\x38']
The labels should come under the buttons, but nothing appears there.
Please check https://developers.google.com/ml-kit/custom-models#model-compatibility. The model compatibility requirements state:

"The data is UINT8 or FLOAT32 type. If the input tensor type is FLOAT32, it must specify the NormalizationOptions by attaching Metadata."
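The exception says exactly this: kiri-v1.tflite has a FLOAT32 input tensor but no metadata, so ML Kit does not know how to normalize the image pixels. Either export a UINT8-quantized model, or attach TFLite metadata (including NormalizationOptions and a label file) to the model you already have. Below is a minimal sketch using the TensorFlow Lite Support library's metadata writer; the file names are placeholders and the mean/std values are assumptions -- use the normalization your model was trained with (127.5/127.5 maps pixels to [-1, 1]; mean 0 and std 255 maps them to [0, 1]).

# pip install tflite-support
from tflite_support.metadata_writers import image_classifier
from tflite_support.metadata_writers import writer_utils

MODEL_PATH = "kiri-v1.tflite"                  # model without metadata (placeholder path)
LABEL_FILE = "labels.txt"                      # one class name per line, in output order
SAVE_TO_PATH = "kiri-v1-with-metadata.tflite"  # model with metadata attached

# Normalization the model expects (assumed values -- match your training pipeline).
INPUT_NORM_MEAN = [127.5]
INPUT_NORM_STD = [127.5]

# Build image-classifier metadata, which includes NormalizationOptions for the
# FLOAT32 input tensor and attaches the label file to the output tensor.
writer = image_classifier.MetadataWriter.create_for_inference(
    writer_utils.load_file(MODEL_PATH),
    INPUT_NORM_MEAN,
    INPUT_NORM_STD,
    [LABEL_FILE],
)

# Optional: inspect the metadata that will be written.
print(writer.get_metadata_json())

# Save the model with the metadata populated.
writer_utils.save_file(writer.populate(), SAVE_TO_PATH)

Then replace assets/ml/kiri-v1.tflite with the regenerated file; the Flutter code itself should not need to change for this error to go away.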