I am trying to give an image a predicted label by a trained AutoML model in Firebase function. This image is stored at Google Cloud Storage. I tried to read the image in this way:
const gcs = require('@google-cloud/storage')();
const myBucket = gcs.bucket(object.bucket);
const file = myBucket.file(object.name);
const stream = file.createReadStream();
var data = '';
stream.on('error', function(err) {
console.log("error");
})
.on('data', function(chunk) {
data = data + chunk;
console.log("Writing data");
})
.on('end', function() {
});
After I finished reading data, I transfer the data into 'binary' format
var encoded = new Buffer(data)
encoded = encoded.toString('binary');
But I feed these encoded data into 'imageBytes':
const payload = {
"image": {
"imageBytes": encoded
},
};
var formattedName = client.modelPath(project, location, model);
var request = {
name: formattedName,
payload: payload,
};
client.predict(request)
.then(responses => {
console.log("responses:", responses);
var response = responses[0];
console.log("response:", response);
})
.catch(err => {
console.error(err);
});
It will throw an error:
Error: invalid encoding
at Error (native)
at Object.decode (/user_code/node_modules/@google-cloud/automl/node_modules/@protobufjs/base64/index.js:105:19)
at Type.Image$fromObject [as fromObject] (eval at Codegen (/user_code/node_modules/@google-cloud/automl/node_modules/@protobufjs/codegen/index.js:50:33), <anonymous>:9:15)
at Type.ExamplePayload$fromObject [as fromObject] (eval at Codegen (/user_code/node_modules/@google-cloud/automl/node_modules/@protobufjs/codegen/index.js:50:33), <anonymous>:10:20)
at Type.PredictRequest$fromObject [as fromObject] (eval at Codegen (/user_code/node_modules/@google-cloud/automl/node_modules/@protobufjs/codegen/index.js:50:33), <anonymous>:13:22)
at serialize (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/protobuf_js_6_common.js:70:23)
at Object.final_requester.sendMessage (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/client_interceptors.js:802:37)
at InterceptingCall._callNext (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/client_interceptors.js:418:43)
at InterceptingCall.sendMessage (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/client_interceptors.js:460:8)
at InterceptingCall._callNext (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/client_interceptors.js:424:12)
But if I encoded the image in 'base64', it will throw an error:
Error: 3 INVALID_ARGUMENT: Provided image is not valid.
at Object.exports.createStatusError (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/common.js:87:15)
at Object.onReceiveStatus (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/client_interceptors.js:1188:28)
at InterceptingListener._callNext (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/client_interceptors.js:564:42)
at InterceptingListener.onReceiveStatus (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/client_interceptors.js:614:8)
at callback (/user_code/node_modules/@google-cloud/automl/node_modules/grpc/src/client_interceptors.js:841:24)
code: 3,
metadata: Metadata { _internal_repr: { 'grpc-server-stats-bin': [Object] } },
details: 'Provided image is not valid.'
I tried local image file prediction in Python as well, it uses 'binary' binary representation and it works well. When I use 'base64' in Python it will return "Provided image is not valid." as in firebase function.
I am confusing that whether I read the image from Cloud Storage in a wrong way or I encoded the image in a wrong way.
Complete Code in Firebase function:
const automl = require('@google-cloud/automl');
var client = new automl.v1beta1.PredictionServiceClient();
const gcs = require('@google-cloud/storage')();
const myBucket = gcs.bucket(object.bucket);
const file = myBucket.file(object.name);
const stream = file.createReadStream();
var data = '';
stream.on('error', function(err) {
console.log("error");
})
.on('data', function(chunk) {
data = data + chunk;
console.log("Writing data");
})
.on('end', function() {
var encoded = new Buffer(data)
encoded = encoded.toString('binary');
console.log("binary:", encoded);
const payload = {
"image": {
"imageBytes": encoded
},
};
var formattedName = client.modelPath(project, location, model);
var request = {
name: formattedName,
payload: payload,
};
client.predict(request)
.then(responses => {
console.log("responses:", responses);
var response = responses[0];
console.log("response:", response);
})
.catch(err => {
console.error(err);
});
stream.destroy();
});
Complete code in Python:
import sys
from google.cloud import automl_v1beta1
from google.cloud.automl_v1beta1.proto import service_pb2
# Import the base64 encoding library.
import base64
def get_prediction(content, project_id, model_id):
prediction_client = automl_v1beta1.PredictionServiceClient()
name = 'projects/{}/locations/us-central1/models/{}'.format(project_id, model_id)
payload = {'image': {'image_bytes': content }}
params = {}
request = prediction_client.predict(name, payload, params)
return request # waits till request is returned
if __name__ == '__main__':
file_path = sys.argv[1]
project_id = sys.argv[2]
model_id = sys.argv[3]
with open(file_path, 'rb') as ff:
content = ff.read()
print(content)
# Encoded as base64
# content = base64.b64encode(content)
print(get_prediction(content, project_id, model_id))
I use file.download(), it works.
file.download().then(imageData => {
const image = imageData[0];
const buffer = image.toString('base64');
const payload = {
"image": {
"imageBytes": buffer
}
}
const request = {
name: formattedName,
payload: payload
};
client.predict(request).then(result => {
console.log('predict:', result);
}).catch(err => console.error(err));
});