In my Huawei quick app, music is playing and the user switches to another page in the app, then switches to another song using the status bar controls. At that point playback pauses. Why does this happen, and how can I keep playback running?
Listen to audio events on the app home page, not only on the playback page. This way, the audio events are still received after the user leaves the playback page, so the playback logic can continue to be controlled.
Note: Huawei Quick App Engine does not support calling the audio API in app.ux. Therefore, after the user exits the app, the quick app cannot receive audio event callbacks even though the music keeps playing in the background.
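A page can compensate for the callbacks missed while the app was closed by querying the current playback state in onShow and refreshing its UI from the result. A minimal sketch, assuming a hypothetical isPlaying data field and assuming data.state reports 'play' while music is playing:

<script>
  import audio from '@system.audio';

  module.exports = {
    data: {
      isPlaying: false  // hypothetical flag driving a play/pause indicator
    },
    onShow(options) {
      var that = this;
      // Re-sync the UI from the engine, since audio events dispatched while
      // the app was closed were never delivered to the page.
      audio.getPlayState({
        success: function (data) {
          that.isPlaying = (data.state === 'play');
        },
        fail: function (data, code) {
          console.log('getPlayState fail, code=' + code);
        }
      });
    }
  }
</script>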
The following demo has two pages: Main (the home page) and Audio. To avoid duplicated code and keep the project maintainable, the audio-control code is extracted into a common JavaScript file that each page can call.
utils.js:

import audio from '@system.audio';

export default {
  // Register all audio event callbacks; called from each page's onShow.
  listenAudio() {
    var that = this;
    console.info('utils.js listenAudio');
    audio.onplay = function () {
      console.log('audio onplay')
    }
    audio.onpause = function () {
      console.log('audio onpause')
    }
    audio.onended = function () {
      console.log('audio onended')
    }
    audio.ondurationchange = function () {
      console.log('utils.js ondurationchange')
      var total = audio.duration
      console.log('utils.js ondurationchange total=' + total)
    }
    audio.ontimeupdate = function () {
      var time = audio.currentTime
      // console.log('utils.js ontimeupdate time=' + time)
    }
    // Triggered by the "previous" button in the status bar notification.
    audio.onprevious = function () {
      audio.cover = 'https://xx.jpg'
      audio.title = 'Piano music'
      audio.artist = 'Mozart'
      // Replace with the music resource link.
      audio.src = 'https://xx.mp3'
      console.log('utils.js onprevious event from notification')
    }
    // Triggered by the "next" button in the status bar notification.
    audio.onnext = function () {
      audio.cover = 'xx.jpg'
      audio.title = 'Pop'
      audio.artist = 'Michael Jackson'
      // Replace with the music resource link.
      audio.src = 'https://xx.mp3'
      console.log('utils.js onnext event from notification')
    }
  },
  getAudioPlayState() {
    audio.getPlayState({
      success: function (data) {
        console.log(`getAudioPlayState success: state: ${data.state}, src: ${data.src},
          currentTime: ${data.currentTime}, autoplay: ${data.autoplay}, loop: ${data.loop},
          volume: ${data.volume}, muted: ${data.muted}, notificationVisible: ${data.notificationVisible}`);
      },
      fail: function (data, code) {
        console.log('getAudioPlayState fail, code=' + code);
      }
    });
  },
  startPlay() {
    audio.play();
  },
  pausePlay() {
    audio.pause();
  },
  stopPlay() {
    audio.stop();
  },
  seekProgress(len) {
    audio.currentTime = len;
  },
  setVolume(value) {
    audio.volume = value;
  },
  setMute(isMuted) {
    audio.muted = isMuted
  },
  setLoop(isLoop) {
    audio.loop = isLoop
  },
  setStreamType() {
    if (audio.streamType === 'music') {
      audio.streamType = 'voicecall'
    } else {
      audio.streamType = 'music'
    }
    console.error('audio.streamType =' + audio.streamType);
  },
  setTitle(title) {
    console.info('setTitle=' + title);
    audio.title = title;
  },
  setArtist(artist) {
    console.info('setArtist artist=' + artist);
    audio.artist = artist;
  },
  setCover(src) {
    console.info('setCover src=' + src);
    audio.cover = src;
  }
}
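The remaining functions (startPlay, pausePlay, seekProgress, setVolume, and so on) are thin wrappers around the audio API, so any page can control playback through the same module. As a rough sketch, a page could bind hypothetical play and pause handlers to buttons like this (the handler names, song URL, and metadata are placeholders, not part of the demo):

<script>
  import audio from '@system.audio';
  import utils from '../Util/utils.js';

  module.exports = {
    // Hypothetical handler bound to a play button in the page template.
    playDemoTrack() {
      // Replace with the music resource link.
      audio.src = 'https://xx.mp3';
      utils.setCover('https://xx.jpg');  // shown in the status bar notification
      utils.setTitle('Piano music');
      utils.setArtist('Mozart');
      utils.startPlay();
    },
    // Hypothetical handler bound to a pause button.
    pauseDemoTrack() {
      utils.pausePlay();
    }
  }
</script>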
Add the audio event listener in the lifecycle method onShow of the Main page by calling listenAudio in utils.js. Sample code:

<script>
  import utils from '../Util/utils.js';

  module.exports = {
    onShow(options) {
      utils.listenAudio();
    },
  }
</script>
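Because listenAudio is registered again in Main's onShow, the music keeps playing and stays controllable while the user moves between pages. For illustration, a hypothetical "open player" handler on the Main page could navigate to the playback page with the router (assuming the playback page is registered as /Audio in manifest.json):

<script>
  import router from '@system.router';
  import utils from '../Util/utils.js';

  module.exports = {
    onShow(options) {
      utils.listenAudio();
    },
    // Hypothetical handler bound to an "Open player" button; playback continues
    // during the page switch because the audio runs independently of the page.
    openPlayer() {
      router.push({
        uri: '/Audio'
      });
    }
  }
</script>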
Add the audio event listener in the lifecycle method onShow of the Audio page as well, again by calling listenAudio in utils.js. The progress callbacks are registered separately here because the playback progress needs to be displayed on the playback page. Sample code:

<script>
  import audio from '@system.audio';
  import utils from '../Util/utils.js';

  module.exports = {
    data: {
      time: 0,
      total: 0
    },
    onShow(options) {
      var that = this;
      utils.listenAudio();
      // Override the progress callbacks so this page can show the current position.
      audio.ondurationchange = function () {
        console.log('audio ondurationchange')
        that.total = audio.duration
        console.log('audio ondurationchange total=' + that.total)
      }
      audio.ontimeupdate = function () {
        that.time = audio.currentTime
        console.log('ontimeupdate time=' + that.time)
      }
    },
  }
</script>
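To complete the picture, the playback page can bind time and total in its template and call the utils.js helpers from its controls. A rough sketch of what the Audio page's template and handlers might look like (the layout, class names, and handler names are illustrative):

<template>
  <div class="container">
    <!-- time and total are the data fields updated in ontimeupdate/ondurationchange -->
    <text>{{ time }} / {{ total }}</text>
    <div class="controls">
      <text onclick="onPlay">Play</text>
      <text onclick="onPause">Pause</text>
    </div>
  </div>
</template>

<script>
  import utils from '../Util/utils.js';

  module.exports = {
    // ... data and onShow as shown above ...
    onPlay() {
      utils.startPlay();
    },
    onPause() {
      utils.pausePlay();
    }
  }
</script>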
For more details, please check the following guide: