I'm trying to copy a SQLite database from the data folder in my extension directory to the profile folder, in order to use it.
For now, I'm trying this:
const {Cc, Ci, Cu} = require("chrome");
const {NetUtils} = Cu.import("resource://gre/modules/NetUtil.jsm");
const data = require('sdk/self').data;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/FileUtils.jsm");
var file = Cc["@mozilla.org/file/directory_service;1"].
           getService(Ci.nsIProperties).
           get("TmpD", Ci.nsIFile);
file.append("searchEngines.sqlite");
file.createUnique(Ci.nsIFile.NORMAL_FILE_TYPE, 0666);
// Then, we need an output stream to our output file.
var ostream = Cc["@mozilla.org/network/file-output-stream;1"].createInstance(Ci.nsIFileOutputStream);
ostream.init(file, -1, -1, 0);
// Finally, we need an input stream to take data from.
var iStreamData = NetUtil.ioService.newChannel(data.url("searchEngines.sqlite"), null, null).open();
let istream = Cc["@mozilla.org/io/string-input-stream;1"].createInstance(Ci.nsIStringInputStream);
istream.setData(iStreamData, iStreamData.length);
NetUtil.asyncCopy(istream, ostream, function(aResult) {
  console.log(aResult); // return 0
});
console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // return false
let dbConn = Services.storage.openDatabase(file);
The file doesn't seem to exist (the console.log(file.exists()) returns FALSE) and is not populated, even though the console.log(aResult) returns 0.
Where is my mistake, and is there a better way to do that?
Besides that it uses sync I/O (opening the channel with .open instead of .asyncOpen), the NetUtil.asyncCopy operation is still async, meaning the code
NetUtil.asyncCopy(istream, ostream, function(aResult) {
  console.log(aResult); // return 0
});
console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // return false
let dbConn = Services.storage.openDatabase(file);
will try to open the file before the copy likely finishes!
However, file.exists() will likely be true, because you already opened the file for writing. It's just that the file is still blank, because the data copy isn't done (or even started) yet. (Actually, in your case it is false, because you're checking searchEngines.sqlite in ProfD and not TmpD; if you correct that, the previous statement applies.)
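In other words, if you check the nsIFile you actually created (in TmpD) rather than a ProfD path, you would most likely see something like this at that point:
console.log(file.exists());   // most likely true: createUnique() already created the file
console.log(file.fileSize);   // most likely 0: asyncCopy hasn't finished (or even started) yet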
You can only use the file when/after your callback to .asyncCopy is done, e.g.
NetUtil.asyncCopy(istream, ostream, function(aResult) {
  console.log(aResult);
  console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // still false: this checks ProfD, but the file was created in TmpD
  let dbConn = Services.storage.openDatabase(file);
  // ...
});
PS: You might want to .asyncOpen the channel, then use NetUtil.asyncFetch and pass the resulting stream to .asyncCopy to be truly async. That is fine for smallish files, since it caches the contents in memory first. For large files you could create a variant of the NetUtil.asyncFetch implementation that feeds the .outputStream end directly to NetUtil.asyncCopy. That is a bit more complicated, so I won't write it up in detail until somebody is truly interested in this and asks the corresponding question.
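Roughly, such a variant could look like the following sketch (untested, and streamURLToFile is just a name I'm making up here): it skips the in-memory pipe that NetUtil.asyncFetch uses and instead lets an nsISimpleStreamListener write the channel data straight into the file output stream.
const {Cc, Ci, Cu} = require("chrome");
Cu.import("resource://gre/modules/Services.jsm");

function streamURLToFile(url, file, callback) {
  var ostream = Cc["@mozilla.org/network/file-output-stream;1"].
                createInstance(Ci.nsIFileOutputStream);
  ostream.init(file, -1, -1, Ci.nsIFileOutputStream.DEFER_OPEN);

  // nsISimpleStreamListener forwards every chunk the channel delivers to ostream.
  var listener = Cc["@mozilla.org/network/simple-stream-listener;1"].
                 createInstance(Ci.nsISimpleStreamListener);
  listener.init(ostream, {
    onStartRequest: function(request, context) {},
    onStopRequest: function(request, context, status) {
      ostream.close();
      callback && callback(file, status);
    }
  });

  Services.io.newChannel(url, null, null).asyncOpen(listener, null);
}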
Edit: here is how I'd write it:
const {Cc, Ci, Cu} = require("chrome");
const data = require('sdk/self').data;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/NetUtil.jsm");

function copyDataURLToFile(url, file, callback) {
  NetUtil.asyncFetch(url, function(istream) {
    var ostream = Cc["@mozilla.org/network/file-output-stream;1"].
                  createInstance(Ci.nsIFileOutputStream);
    // DEFER_OPEN: the file is only actually opened once data is first written.
    ostream.init(file, -1, -1, Ci.nsIFileOutputStream.DEFER_OPEN);
    NetUtil.asyncCopy(istream, ostream, function(result) {
      callback && callback(file, result);
    });
  });
}

var file = Services.dirsvc.get("TmpD", Ci.nsIFile);
file.append("searchEngines.sqlite");
copyDataURLToFile(data.url("searchEngines.sqlite"), file, function(file, result) {
  console.log(result);
  console.log(file.exists());
  console.log(file.fileSize);
});
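And to come back to the original goal: open the database inside that callback, only after the copy has reported success. Roughly (untested):
copyDataURLToFile(data.url("searchEngines.sqlite"), file, function(file, result) {
  if (result === 0 && file.exists()) { // 0 == NS_OK, i.e. the copy succeeded
    let dbConn = Services.storage.openDatabase(file);
    // ... use dbConn, then dbConn.asyncClose() when finished
  }
});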