Merge pull request #442 from toasted-nutbread/audio-system-refactoring

Audio system refactoring
This commit is contained in:
toasted-nutbread 2020-04-18 21:10:48 -04:00 committed by GitHub
commit 03d77cc3a6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 103 additions and 78 deletions

View File

@ -85,14 +85,14 @@ class AnkiNoteBuilder {
});
}
async injectAudio(definition, fields, sources, optionsContext) {
async injectAudio(definition, fields, sources, details) {
if (!this._containsMarker(fields, 'audio')) { return; }
try {
const expressions = definition.expressions;
const audioSourceDefinition = Array.isArray(expressions) ? expressions[0] : definition;
const {uri} = await this._audioSystem.getDefinitionAudio(audioSourceDefinition, sources, {tts: false, optionsContext});
const {uri} = await this._audioSystem.getDefinitionAudio(audioSourceDefinition, sources, details);
const filename = this._createInjectedAudioFileName(audioSourceDefinition);
if (filename !== null) {
definition.audio = {url: uri, filename};

View File

@ -49,11 +49,11 @@ class AudioUriBuilder {
return url;
}
async getUri(definition, source, options) {
async getUri(definition, source, details) {
const handler = this._getUrlHandlers.get(source);
if (typeof handler === 'function') {
try {
return await handler(definition, options);
return await handler(definition, details);
} catch (e) {
// NOP
}
@ -132,26 +132,24 @@ class AudioUriBuilder {
throw new Error('Failed to find audio URL');
}
async _getUriTextToSpeech(definition, options) {
const voiceURI = options.audio.textToSpeechVoice;
if (!voiceURI) {
async _getUriTextToSpeech(definition, {textToSpeechVoice}) {
if (!textToSpeechVoice) {
throw new Error('No voice');
}
return `tts:?text=${encodeURIComponent(definition.expression)}&voice=${encodeURIComponent(voiceURI)}`;
return `tts:?text=${encodeURIComponent(definition.expression)}&voice=${encodeURIComponent(textToSpeechVoice)}`;
}
async _getUriTextToSpeechReading(definition, options) {
const voiceURI = options.audio.textToSpeechVoice;
if (!voiceURI) {
async _getUriTextToSpeechReading(definition, {textToSpeechVoice}) {
if (!textToSpeechVoice) {
throw new Error('No voice');
}
return `tts:?text=${encodeURIComponent(definition.reading || definition.expression)}&voice=${encodeURIComponent(voiceURI)}`;
return `tts:?text=${encodeURIComponent(definition.reading || definition.expression)}&voice=${encodeURIComponent(textToSpeechVoice)}`;
}
async _getUriCustom(definition, options) {
const customSourceUrl = options.audio.customSourceUrl;
async _getUriCustom(definition, {customSourceUrl}) {
if (typeof customSourceUrl !== 'string') {
throw new Error('No custom URL defined');
}
return customSourceUrl.replace(/\{([^}]*)\}/g, (m0, m1) => (hasOwn(definition, m1) ? `${definition[m1]}` : m0));
}
}

View File

@ -51,8 +51,11 @@ class Backend {
this.options = null;
this.optionsSchema = null;
this.defaultAnkiFieldTemplates = null;
this.audioSystem = new AudioSystem({getAudioUri: this._getAudioUri.bind(this)});
this.audioUriBuilder = new AudioUriBuilder();
this.audioSystem = new AudioSystem({
audioUriBuilder: this.audioUriBuilder,
useCache: false
});
this.ankiNoteBuilder = new AnkiNoteBuilder({
anki: this.anki,
audioSystem: this.audioSystem,
@ -494,11 +497,12 @@ class Backend {
const templates = this.defaultAnkiFieldTemplates;
if (mode !== 'kanji') {
const {customSourceUrl} = options.audio;
await this.ankiNoteBuilder.injectAudio(
definition,
options.anki.terms.fields,
options.audio.sources,
optionsContext
{textToSpeechVoice: null, customSourceUrl}
);
}
@ -573,9 +577,8 @@ class Backend {
return this._runCommand(command, params);
}
async _onApiAudioGetUri({definition, source, optionsContext}) {
const options = this.getOptions(optionsContext);
return await this.audioUriBuilder.getUri(definition, source, options);
async _onApiAudioGetUri({definition, source, details}) {
return await this.audioUriBuilder.getUri(definition, source, details);
}
_onApiScreenshotGet({options}, sender) {
@ -861,16 +864,6 @@ class Backend {
}
}
async _getAudioUri(definition, source, details) {
let optionsContext = (typeof details === 'object' && details !== null ? details.optionsContext : null);
if (!(typeof optionsContext === 'object' && optionsContext !== null)) {
optionsContext = this.optionsContext;
}
const options = this.getOptions(optionsContext);
return await this.audioUriBuilder.getUri(definition, source, options);
}
async _renderTemplate(template, data) {
return handlebarsRenderDynamic(template, data);
}

View File

@ -18,7 +18,6 @@
/* global
* AudioSourceUI
* AudioSystem
* apiAudioGetUri
* getOptionsContext
* getOptionsMutable
* settingsSaveOptions
@ -29,10 +28,8 @@ let audioSystem = null;
async function audioSettingsInitialize() {
audioSystem = new AudioSystem({
getAudioUri: async (definition, source) => {
const optionsContext = getOptionsContext();
return await apiAudioGetUri(definition, source, optionsContext);
}
audioUriBuilder: null,
useCache: true
});
const optionsContext = getOptionsContext();
@ -115,7 +112,7 @@ function textToSpeechTest() {
const text = document.querySelector('#text-to-speech-voice-test').dataset.speechText || '';
const voiceUri = document.querySelector('#text-to-speech-voice').value;
const audio = audioSystem.createTextToSpeechAudio({text, voiceUri});
const audio = audioSystem.createTextToSpeechAudio(text, voiceUri);
audio.volume = 1.0;
audio.play();
} catch (e) {

View File

@ -64,8 +64,8 @@ function apiTemplateRender(template, data) {
return _apiInvoke('templateRender', {data, template});
}
function apiAudioGetUri(definition, source, optionsContext) {
return _apiInvoke('audioGetUri', {definition, source, optionsContext});
function apiAudioGetUri(definition, source, details) {
return _apiInvoke('audioGetUri', {definition, source, details});
}
function apiCommandExec(command, params) {

View File

@ -40,7 +40,7 @@ class TextToSpeechAudio {
}
}
play() {
async play() {
try {
if (this._utterance === null) {
this._utterance = new SpeechSynthesisUtterance(this.text || '');
@ -66,10 +66,10 @@ class TextToSpeechAudio {
}
class AudioSystem {
constructor({getAudioUri}) {
this._cache = new Map();
constructor({audioUriBuilder, useCache}) {
this._cache = useCache ? new Map() : null;
this._cacheSizeMaximum = 32;
this._getAudioUri = getAudioUri;
this._audioUriBuilder = audioUriBuilder;
if (typeof speechSynthesis !== 'undefined') {
// speechSynthesis.getVoices() will not be populated unless some API call is made.
@ -79,21 +79,31 @@ class AudioSystem {
async getDefinitionAudio(definition, sources, details) {
const key = `${definition.expression}:${definition.reading}`;
const cacheValue = this._cache.get(definition);
if (typeof cacheValue !== 'undefined') {
const {audio, uri, source} = cacheValue;
return {audio, uri, source};
const hasCache = (this._cache !== null);
if (hasCache) {
const cacheValue = this._cache.get(key);
if (typeof cacheValue !== 'undefined') {
const {audio, uri, source} = cacheValue;
const index = sources.indexOf(source);
if (index >= 0) {
return {audio, uri, index};
}
}
}
for (const source of sources) {
for (let i = 0, ii = sources.length; i < ii; ++i) {
const source = sources[i];
const uri = await this._getAudioUri(definition, source, details);
if (uri === null) { continue; }
try {
const audio = await this._createAudio(uri, details);
this._cacheCheck();
this._cache.set(key, {audio, uri, source});
return {audio, uri, source};
const audio = await this._createAudio(uri);
if (hasCache) {
this._cacheCheck();
this._cache.set(key, {audio, uri, source});
}
return {audio, uri, index: i};
} catch (e) {
// NOP
}
@ -102,7 +112,7 @@ class AudioSystem {
throw new Error('Could not create audio');
}
createTextToSpeechAudio({text, voiceUri}) {
createTextToSpeechAudio(text, voiceUri) {
const voice = this._getTextToSpeechVoiceFromVoiceUri(voiceUri);
if (voice === null) {
throw new Error('Invalid text-to-speech voice');
@ -114,20 +124,24 @@ class AudioSystem {
// NOP
}
async _createAudio(uri, details) {
async _createAudio(uri) {
const ttsParameters = this._getTextToSpeechParameters(uri);
if (ttsParameters !== null) {
if (typeof details === 'object' && details !== null) {
if (details.tts === false) {
throw new Error('Text-to-speech not permitted');
}
}
return this.createTextToSpeechAudio(ttsParameters);
const {text, voiceUri} = ttsParameters;
return this.createTextToSpeechAudio(text, voiceUri);
}
return await this._createAudioFromUrl(uri);
}
// Resolve the audio URI for (definition, source) via the injected audioUriBuilder.
// Returns the builder's (promise of a) URI, or null when no builder was provided
// (callers treat a null/awaited-null result as "no URI available" and continue).
_getAudioUri(definition, source, details) {
return (
this._audioUriBuilder !== null ?
this._audioUriBuilder.getUri(definition, source, details) :
null
);
}
_createAudioFromUrl(url) {
return new Promise((resolve, reject) => {
const audio = new Audio(url);

View File

@ -45,7 +45,14 @@ class Display {
this.index = 0;
this.audioPlaying = null;
this.audioFallback = null;
this.audioSystem = new AudioSystem({getAudioUri: this._getAudioUri.bind(this)});
this.audioSystem = new AudioSystem({
audioUriBuilder: {
getUri: async (definition, source, details) => {
return await apiAudioGetUri(definition, source, details);
}
},
useCache: true
});
this.styleNode = null;
this.eventListeners = new EventListenerCollection();
@ -784,16 +791,14 @@ class Display {
const expression = expressionIndex === -1 ? definition : definition.expressions[expressionIndex];
if (this.audioPlaying !== null) {
this.audioPlaying.pause();
this.audioPlaying = null;
}
this._stopPlayingAudio();
const sources = this.options.audio.sources;
let audio, source, info;
let audio, info;
try {
({audio, source} = await this.audioSystem.getDefinitionAudio(expression, sources));
info = `From source ${1 + sources.indexOf(source)}: ${source}`;
const {sources, textToSpeechVoice, customSourceUrl} = this.options.audio;
let index;
({audio, index} = await this.audioSystem.getDefinitionAudio(expression, sources, {textToSpeechVoice, customSourceUrl}));
info = `From source ${1 + index}: ${sources[index]}`;
} catch (e) {
if (this.audioFallback === null) {
this.audioFallback = new Audio('/mixed/mp3/button.mp3');
@ -802,7 +807,7 @@ class Display {
info = 'Could not find audio';
}
const button = this.audioButtonFindImage(entryIndex);
const button = this.audioButtonFindImage(entryIndex, expressionIndex);
if (button !== null) {
let titleDefault = button.dataset.titleDefault;
if (!titleDefault) {
@ -812,10 +817,19 @@ class Display {
button.title = `${titleDefault}\n${info}`;
}
this._stopPlayingAudio();
this.audioPlaying = audio;
audio.currentTime = 0;
audio.volume = this.options.audio.volume / 100.0;
audio.play();
const playPromise = audio.play();
if (typeof playPromise !== 'undefined') {
try {
await playPromise;
} catch (e2) {
// NOP
}
}
} catch (e) {
this.onError(e);
} finally {
@ -823,6 +837,13 @@ class Display {
}
}
// Pause and clear the currently playing audio, if any; safe to call when
// nothing is playing (this.audioPlaying is null).
_stopPlayingAudio() {
if (this.audioPlaying !== null) {
this.audioPlaying.pause();
this.audioPlaying = null;
}
}
noteUsesScreenshot(mode) {
const optionsAnki = this.options.anki;
const fields = (mode === 'kanji' ? optionsAnki.kanji : optionsAnki.terms).fields;
@ -901,9 +922,16 @@ class Display {
viewerButton.dataset.noteId = noteId;
}
audioButtonFindImage(index) {
audioButtonFindImage(index, expressionIndex) {
const entry = this.getEntry(index);
return entry !== null ? entry.querySelector('.action-play-audio>img') : null;
if (entry === null) { return null; }
const container = (
expressionIndex >= 0 ?
entry.querySelector(`.term-expression:nth-of-type(${expressionIndex + 1})`) :
entry
);
return container !== null ? container.querySelector('.action-play-audio>img') : null;
}
async getDefinitionsAddable(definitions, modes) {
@ -947,9 +975,4 @@ class Display {
}
};
}
async _getAudioUri(definition, source) {
const optionsContext = this.getOptionsContext();
return await apiAudioGetUri(definition, source, optionsContext);
}
}