I am trying to use Microsoft's Face API with Android Studio to make an app. For now I'm just experimenting with the API, but I've run into an issue. Of the face attribute types that should be available (see this page), the only ones I can actually pick from are Age, FacialHair, Gender, HeadPose, and Smile. I really want to use the Emotion attribute type, but it isn't recognized.
The error I get is: Cannot resolve symbol 'Emotion'
Here is the relevant section of code:
Face[] result = faceServiceClient.detect(
        inputStreams[0], true, true,
        new FaceServiceClient.FaceAttributeType[]{FaceServiceClient.FaceAttributeType.Emotion});
and here is the entire code in my MainActivity:
package me.ianterry.face;

import android.app.ProgressDialog;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.AsyncTask;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;

import com.microsoft.projectoxford.face.*;
import com.microsoft.projectoxford.face.contract.*;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;

public class MainActivity extends AppCompatActivity {
    private FaceServiceClient faceServiceClient =
            new FaceServiceRestClient("https://westcentralus.api.cognitive.microsoft.com/face/v1.0", "MY KEY");

    private ImageView mImageView;
    private Button mProcessButton;
    private ProgressDialog progress;
    public final String TAG = "attributeMethod";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Load the test image from resources and show it.
        final Bitmap myBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.test_image);
        mImageView = findViewById(R.id.image);
        mImageView.setImageBitmap(myBitmap);

        mProcessButton = findViewById(R.id.btn_process);
        mProcessButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                detectAndFrame(myBitmap);
            }
        });
        progress = new ProgressDialog(this);
    }

    private void detectAndFrame(final Bitmap myBitmap) {
        // Re-encode the bitmap as JPEG and hand the bytes to the detect call as a stream.
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        myBitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
        ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());

        AsyncTask<InputStream, String, Face[]> detectTask = new AsyncTask<InputStream, String, Face[]>() {
            //private ProgressDialog progress = new ProgressDialog(MainActivity.this);

            @Override
            protected void onPostExecute(Face[] faces) {
                progress.dismiss();
                if (faces == null) {
                    return;
                }
                mImageView.setImageBitmap(drawFaceRectangleOnBitmap(myBitmap, faces));
                attributeMethod(faces);
            }

            @Override
            protected void onPreExecute() {
                super.onPreExecute();
                progress.show();
            }

            @Override
            protected void onProgressUpdate(String... values) {
                super.onProgressUpdate(values);
                progress.setMessage(values[0]);
            }

            @Override
            protected Face[] doInBackground(InputStream... inputStreams) {
                //return new Face[0];
                try {
                    publishProgress("Detecting...");
                    Face[] result = faceServiceClient.detect(
                            inputStreams[0],
                            true,  // returnFaceId
                            true,  // returnFaceLandmarks
                            new FaceServiceClient.FaceAttributeType[]{FaceServiceClient.FaceAttributeType.Emotion});
                    if (result == null) {
                        publishProgress("Detection finished. Nothing detected.");
                        return null;
                    }
                    publishProgress(String.format("Detection Finished. %d face(s) detected", result.length));
                    return result;
                } catch (Exception e) {
                    publishProgress("Detection failed.");
                    return null;
                }
            }
        };
        detectTask.execute(inputStream);
    }

    // Draw a white stroke rectangle around each detected face on a mutable copy of the bitmap.
    private static Bitmap drawFaceRectangleOnBitmap(Bitmap myBitmap, Face[] faces) {
        Bitmap bitmap = myBitmap.copy(Bitmap.Config.ARGB_8888, true);
        Canvas canvas = new Canvas(bitmap);
        Paint paint = new Paint();
        paint.setAntiAlias(true);
        paint.setStyle(Paint.Style.STROKE);
        paint.setColor(Color.WHITE);
        int strokeWidth = 8;
        paint.setStrokeWidth(strokeWidth);
        if (faces != null) {
            for (Face face : faces) {
                FaceRectangle faceRectangle = face.faceRectangle;
                canvas.drawRect(faceRectangle.left,
                        faceRectangle.top,
                        faceRectangle.left + faceRectangle.width,
                        faceRectangle.top + faceRectangle.height,
                        paint);
            }
        }
        return bitmap;
    }

    private void attributeMethod(Face[] faces) {
        for (Face face : faces) {
            FaceAttribute attribute = face.faceAttributes;
            Log.d(TAG, "age: " + attribute.age);
            Log.d(TAG, "gender: " + attribute.gender);
        }
    }
}
This code is more or less taken straight from this tutorial.
Support for Emotion was added in version 1.2.5 of the SDK (source), so the Cannot resolve symbol error means you are compiling against an older release; that also explains why only the original five attribute types show up in the enum.
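Bumping the Face SDK dependency in your module-level build.gradle should make the symbol resolve. A minimal sketch, assuming the Maven coordinate used by the official Cognitive Services samples (com.microsoft.projectoxford:face; double-check the group/artifact against the dependency line you already have):

dependencies {
    // Face client SDK; FaceAttributeType.Emotion exists from 1.2.5 onward.
    implementation 'com.microsoft.projectoxford:face:1.4.1'
}

Re-sync Gradle after editing so the IDE picks up the new classes.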
You should use version 1.4.1 until version 1.4.3 is released.
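Once the newer SDK resolves, your detect call compiles as written. Two things to watch: the service only returns the attributes you request, so your current array will leave age and gender null in attributeMethod; add FaceAttributeType.Age and FaceAttributeType.Gender alongside Emotion if you want those logged. For reading the result back, here is a sketch of an extended attributeMethod, assuming (as I recall from the SDK's contract package, so verify the field names in your version) that FaceAttribute has an emotion field whose per-emotion confidences are public double fields mirroring the REST response:

private void attributeMethod(Face[] faces) {
    for (Face face : faces) {
        FaceAttribute attribute = face.faceAttributes;
        // Only populated when FaceAttributeType.Emotion was requested in detect().
        Emotion emotion = attribute.emotion;
        if (emotion != null) {
            Log.d(TAG, "happiness: " + emotion.happiness);
            Log.d(TAG, "sadness: " + emotion.sadness);
            Log.d(TAG, "anger: " + emotion.anger);
            Log.d(TAG, "surprise: " + emotion.surprise);
        }
    }
}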