Display audio update (#1291)

* Move createExpressionAudio to DisplayAudio

* Move createAudioFromInfo to DisplayAudio

* Update TextToSpeechAudio
toasted-nutbread 2021-01-22 22:10:27 -05:00 committed by GitHub
parent a51a591c40
commit 7fbfef513d
4 changed files with 53 additions and 51 deletions
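
The net effect of the change: per-expression audio caching moves out of AudioSystem (whose constructor no longer takes a useCache flag) and into DisplayAudio, which now owns a CacheMap keyed on the serialized [expression, reading] pair. A minimal standalone sketch of that keying pattern, using a plain Map in place of the extension's CacheMap and made-up helper names, purely for illustration:

// Illustrative only: a plain Map stands in for the extension's CacheMap,
// and the helper names below are not part of the codebase.
const cache = new Map();

function getCachedAudioResult(expression, reading) {
    // Same key construction as _createExpressionAudio in the diff below.
    const key = JSON.stringify([expression, reading]);
    return cache.get(key); // undefined on a cache miss
}

function setCachedAudioResult(expression, reading, result) {
    cache.set(JSON.stringify([expression, reading]), result);
}

// '["明日","あした"]' and '["明日","あす"]' serialize to different keys,
// so the same expression with different readings is cached separately.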

View File

@@ -22,7 +22,7 @@
 class AudioController {
     constructor(settingsController) {
         this._settingsController = settingsController;
-        this._audioSystem = new AudioSystem(false);
+        this._audioSystem = new AudioSystem();
         this._audioSourceContainer = null;
         this._audioSourceAddButton = null;
         this._audioSourceEntries = [];

View File

@@ -16,14 +16,11 @@
  */

 /* global
- * CacheMap
  * TextToSpeechAudio
- * api
  */

 class AudioSystem {
-    constructor(useCache) {
-        this._cache = new CacheMap(useCache ? 32 : 0);
+    constructor() {
         this._fallbackAudio = null;
     }

@@ -36,35 +33,6 @@ class AudioSystem {
         eventListeners.addEventListener(speechSynthesis, 'voiceschanged', onVoicesChanged, false);
     }

-    async createExpressionAudio(sources, expression, reading, details) {
-        const key = JSON.stringify([expression, reading]);
-        const cacheValue = this._cache.get(key);
-        if (typeof cacheValue !== 'undefined') {
-            return cacheValue;
-        }
-
-        for (let i = 0, ii = sources.length; i < ii; ++i) {
-            const source = sources[i];
-            const infoList = await await api.getExpressionAudioInfoList(source, expression, reading, details);
-            for (let j = 0, jj = infoList.length; j < jj; ++j) {
-                const info = infoList[j];
-
-                let audio;
-                try {
-                    audio = await this.createAudioFromInfo(info, source);
-                } catch (e) {
-                    continue;
-                }
-
-                const result = {audio, source, infoList, infoListIndex: j};
-                this._cache.set(key, result);
-                return result;
-            }
-        }
-
-        throw new Error('Could not create audio');
-    }
-
     getFallbackAudio() {
         if (this._fallbackAudio === null) {
             this._fallbackAudio = new Audio('/mixed/mp3/button.mp3');
@@ -94,17 +62,6 @@ class AudioSystem {
         return new TextToSpeechAudio(text, voice);
     }

-    async createAudioFromInfo(info, source) {
-        switch (info.type) {
-            case 'url':
-                return await this.createAudio(info.url, source);
-            case 'tts':
-                return this.createTextToSpeechAudio(info.text, info.voice);
-            default:
-                throw new Error(`Unsupported type: ${info.type}`);
-        }
-    }
-
     // Private

     _isAudioValid(audio, source) {

View File

@@ -17,16 +17,19 @@
 /* global
  * AudioSystem
+ * CacheMap
+ * api
  */

 class DisplayAudio {
     constructor(display) {
         this._display = display;
         this._audioPlaying = null;
-        this._audioSystem = new AudioSystem(true);
+        this._audioSystem = new AudioSystem();
         this._autoPlayAudioTimer = null;
         this._autoPlayAudioDelay = 400;
         this._eventListeners = new EventListenerCollection();
+        this._cache = new CacheMap(32);
     }

     get autoPlayAudioDelay() {
@@ -118,7 +121,7 @@ class DisplayAudio {
         let info;
         try {
             let source;
-            ({audio, source} = await this._audioSystem.createExpressionAudio(sources, expression, reading, {textToSpeechVoice, customSourceUrl}));
+            ({audio, source} = await this._createExpressionAudio(sources, expression, reading, {textToSpeechVoice, customSourceUrl}));
             const sourceIndex = sources.indexOf(source);
             info = `From source ${1 + sourceIndex}: ${source}`;
         } catch (e) {
@@ -182,4 +185,44 @@ class DisplayAudio {
         }
         return results;
     }
+
+    async _createExpressionAudio(sources, expression, reading, details) {
+        const key = JSON.stringify([expression, reading]);
+        const cacheValue = this._cache.get(key);
+        if (typeof cacheValue !== 'undefined') {
+            return cacheValue;
+        }
+
+        for (let i = 0, ii = sources.length; i < ii; ++i) {
+            const source = sources[i];
+            const infoList = await await api.getExpressionAudioInfoList(source, expression, reading, details);
+            for (let j = 0, jj = infoList.length; j < jj; ++j) {
+                const info = infoList[j];
+
+                let audio;
+                try {
+                    audio = await this._createAudioFromInfo(info, source);
+                } catch (e) {
+                    continue;
+                }
+
+                const result = {audio, source, infoList, infoListIndex: j};
+                this._cache.set(key, result);
+                return result;
+            }
+        }
+
+        throw new Error('Could not create audio');
+    }
+
+    async _createAudioFromInfo(info, source) {
+        switch (info.type) {
+            case 'url':
+                return await this._audioSystem.createAudio(info.url, source);
+            case 'tts':
+                return this._audioSystem.createTextToSpeechAudio(info.text, info.voice);
+            default:
+                throw new Error(`Unsupported type: ${info.type}`);
+        }
+    }
 }
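
For reference, the value that _createExpressionAudio resolves to (and caches under the [expression, reading] key) has the shape below; the field names come from the code above, while the annotations are interpretive and not part of the commit:

// Shape of the cached result (comments are interpretive):
// {
//     audio,         // playable object from AudioSystem (an Audio element for 'url' info, a TextToSpeechAudio for 'tts')
//     source,        // the audio source string that produced the working entry
//     infoList,      // full list returned by api.getExpressionAudioInfoList for that source
//     infoListIndex  // index within infoList of the entry that produced `audio`
// }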

View File

@@ -17,8 +17,8 @@
 class TextToSpeechAudio {
     constructor(text, voice) {
-        this.text = text;
-        this.voice = voice;
+        this._text = text;
+        this._voice = voice;
         this._utterance = null;
         this._volume = 1;
     }

@@ -26,6 +26,7 @@ class TextToSpeechAudio {
     get currentTime() {
         return 0;
     }
+
     set currentTime(value) {
         // NOP
     }
@@ -33,6 +34,7 @@ class TextToSpeechAudio {
     get volume() {
         return this._volume;
     }
+
     set volume(value) {
         this._volume = value;
         if (this._utterance !== null) {
@@ -43,10 +45,10 @@ class TextToSpeechAudio {
     async play() {
         try {
             if (this._utterance === null) {
-                this._utterance = new SpeechSynthesisUtterance(this.text || '');
+                this._utterance = new SpeechSynthesisUtterance(typeof this._text === 'string' ? this._text : '');
                 this._utterance.lang = 'ja-JP';
                 this._utterance.volume = this._volume;
-                this._utterance.voice = this.voice;
+                this._utterance.voice = this._voice;
             }

             speechSynthesis.cancel();
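
TextToSpeechAudio keeps its Audio-element-like surface (currentTime, volume, play) while text and voice become private fields. A minimal usage sketch, assuming the class is in scope and a ja-JP system voice is installed; the voice lookup and the sample text are illustrative and not part of this commit:

// Illustrative usage; assumes speechSynthesis offers at least one ja-JP voice.
const voice = speechSynthesis.getVoices().find((v) => v.lang === 'ja-JP') || null;
const audio = new TextToSpeechAudio('読む', voice);
audio.volume = 0.8;  // stored now, forwarded to the utterance once play() creates it
await audio.play();  // speaks the text via the Web Speech API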