Update the parameters passed to various audio-related functions

toasted-nutbread 2020-04-10 13:44:31 -04:00
parent 5b3d7fadc3
commit 7fc3882607
7 changed files with 45 additions and 56 deletions


@@ -85,14 +85,14 @@ class AnkiNoteBuilder {
         });
     }
-    async injectAudio(definition, fields, sources, optionsContext) {
+    async injectAudio(definition, fields, sources, details) {
         if (!this._containsMarker(fields, 'audio')) { return; }
         try {
             const expressions = definition.expressions;
             const audioSourceDefinition = Array.isArray(expressions) ? expressions[0] : definition;
-            const {uri} = await this._audioSystem.getDefinitionAudio(audioSourceDefinition, sources, {tts: false, optionsContext});
+            const {uri} = await this._audioSystem.getDefinitionAudio(audioSourceDefinition, sources, details);
             const filename = this._createInjectedAudioFileName(audioSourceDefinition);
             if (filename !== null) {
                 definition.audio = {url: uri, filename};
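Note: a quick usage sketch (not part of this diff) of what a caller of the updated injectAudio now passes; builder, definition, fields, and options are assumed to already exist, with options shaped like the extension's options object.

// Hypothetical call site illustrating the new `details` argument.
const {customSourceUrl} = options.audio;
await builder.injectAudio(
    definition,
    fields,
    options.audio.sources,
    {textToSpeechVoice: null, customSourceUrl} // previously: optionsContext
);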


@@ -49,11 +49,11 @@ class AudioUriBuilder {
         return url;
     }
-    async getUri(definition, source, options) {
+    async getUri(definition, source, details) {
         const handler = this._getUrlHandlers.get(source);
         if (typeof handler === 'function') {
             try {
-                return await handler(definition, options);
+                return await handler(definition, details);
             } catch (e) {
                 // NOP
             }
@@ -132,26 +132,24 @@ class AudioUriBuilder {
         throw new Error('Failed to find audio URL');
     }
-    async _getUriTextToSpeech(definition, options) {
-        const voiceURI = options.audio.textToSpeechVoice;
-        if (!voiceURI) {
+    async _getUriTextToSpeech(definition, {textToSpeechVoice}) {
+        if (!textToSpeechVoice) {
             throw new Error('No voice');
         }
-        return `tts:?text=${encodeURIComponent(definition.expression)}&voice=${encodeURIComponent(voiceURI)}`;
+        return `tts:?text=${encodeURIComponent(definition.expression)}&voice=${encodeURIComponent(textToSpeechVoice)}`;
     }
-    async _getUriTextToSpeechReading(definition, options) {
-        const voiceURI = options.audio.textToSpeechVoice;
-        if (!voiceURI) {
+    async _getUriTextToSpeechReading(definition, {textToSpeechVoice}) {
+        if (!textToSpeechVoice) {
             throw new Error('No voice');
         }
-        return `tts:?text=${encodeURIComponent(definition.reading || definition.expression)}&voice=${encodeURIComponent(voiceURI)}`;
+        return `tts:?text=${encodeURIComponent(definition.reading || definition.expression)}&voice=${encodeURIComponent(textToSpeechVoice)}`;
     }
-    async _getUriCustom(definition, options) {
-        const customSourceUrl = options.audio.customSourceUrl;
+    async _getUriCustom(definition, {customSourceUrl}) {
         if (typeof customSourceUrl !== 'string') {
             throw new Error('No custom URL defined');
         }
         return customSourceUrl.replace(/\{([^}]*)\}/g, (m0, m1) => (hasOwn(definition, m1) ? `${definition[m1]}` : m0));
     }
 }
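Note: the URI handlers now read textToSpeechVoice and customSourceUrl directly off the details argument instead of resolving them from options.audio. A rough example of the details shape (not part of the diff); 'custom' is assumed to be a configured source key and the URL template is illustrative only.

const uri = await audioUriBuilder.getUri(definition, 'custom', {
    textToSpeechVoice: null,
    customSourceUrl: 'http://localhost/audio?term={expression}&reading={reading}'
});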


@@ -51,8 +51,10 @@ class Backend {
         this.options = null;
         this.optionsSchema = null;
         this.defaultAnkiFieldTemplates = null;
-        this.audioSystem = new AudioSystem({getAudioUri: this._getAudioUri.bind(this)});
         this.audioUriBuilder = new AudioUriBuilder();
+        this.audioSystem = new AudioSystem({
+            audioUriBuilder: this.audioUriBuilder
+        });
         this.ankiNoteBuilder = new AnkiNoteBuilder({
             anki: this.anki,
             audioSystem: this.audioSystem,
@@ -494,11 +496,12 @@ class Backend {
         const templates = this.defaultAnkiFieldTemplates;
         if (mode !== 'kanji') {
+            const {customSourceUrl} = options.audio;
             await this.ankiNoteBuilder.injectAudio(
                 definition,
                 options.anki.terms.fields,
                 options.audio.sources,
-                optionsContext
+                {textToSpeechVoice: null, customSourceUrl}
             );
         }
@@ -573,9 +576,8 @@ class Backend {
         return this._runCommand(command, params);
     }
-    async _onApiAudioGetUri({definition, source, optionsContext}) {
-        const options = this.getOptions(optionsContext);
-        return await this.audioUriBuilder.getUri(definition, source, options);
+    async _onApiAudioGetUri({definition, source, details}) {
+        return await this.audioUriBuilder.getUri(definition, source, details);
     }
     _onApiScreenshotGet({options}, sender) {
@@ -861,16 +863,6 @@ class Backend {
         }
     }
-    async _getAudioUri(definition, source, details) {
-        let optionsContext = (typeof details === 'object' && details !== null ? details.optionsContext : null);
-        if (!(typeof optionsContext === 'object' && optionsContext !== null)) {
-            optionsContext = this.optionsContext;
-        }
-        const options = this.getOptions(optionsContext);
-        return await this.audioUriBuilder.getUri(definition, source, options);
-    }
     async _renderTemplate(template, data) {
         return handlebarsRenderDynamic(template, data);
     }
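Note: with _getAudioUri removed, the backend no longer maps an optionsContext to options when resolving audio URIs; the relevant audio settings travel in the details object itself. Reduced to its essentials (a sketch, not part of the diff; definition, source, and customSourceUrl stand in for values the caller already has):

const audioUriBuilder = new AudioUriBuilder();
const audioSystem = new AudioSystem({audioUriBuilder});
// Callers pass the audio settings they already have, no options lookup needed:
const uri = await audioUriBuilder.getUri(definition, source, {textToSpeechVoice: null, customSourceUrl});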


@@ -28,12 +28,7 @@ let audioSourceUI = null;
 let audioSystem = null;
 async function audioSettingsInitialize() {
-    audioSystem = new AudioSystem({
-        getAudioUri: async (definition, source) => {
-            const optionsContext = getOptionsContext();
-            return await apiAudioGetUri(definition, source, optionsContext);
-        }
-    });
+    audioSystem = new AudioSystem({audioUriBuilder: null});
     const optionsContext = getOptionsContext();
     const options = await getOptionsMutable(optionsContext);


@@ -64,8 +64,8 @@ function apiTemplateRender(template, data) {
     return _apiInvoke('templateRender', {data, template});
 }
-function apiAudioGetUri(definition, source, optionsContext) {
-    return _apiInvoke('audioGetUri', {definition, source, optionsContext});
+function apiAudioGetUri(definition, source, details) {
+    return _apiInvoke('audioGetUri', {definition, source, details});
 }
 function apiCommandExec(command, params) {
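Note: apiAudioGetUri is a thin wrapper, so the details object is forwarded to the backend's 'audioGetUri' handler as-is. A hypothetical direct use from frontend code (the definition and source values are examples, not from the diff):

const uri = await apiAudioGetUri(
    {expression: '読む', reading: 'よむ'}, // example definition
    'jpod101',                             // one of the configured audio sources
    {textToSpeechVoice: null, customSourceUrl: null}
);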


@@ -66,10 +66,10 @@ class TextToSpeechAudio {
 }
 class AudioSystem {
-    constructor({getAudioUri}) {
+    constructor({audioUriBuilder}) {
         this._cache = new Map();
         this._cacheSizeMaximum = 32;
-        this._getAudioUri = getAudioUri;
+        this._audioUriBuilder = audioUriBuilder;
         if (typeof speechSynthesis !== 'undefined') {
             // speechSynthesis.getVoices() will not be populated unless some API call is made.
@@ -90,7 +90,7 @@ class AudioSystem {
             if (uri === null) { continue; }
             try {
-                const audio = await this._createAudio(uri, details);
+                const audio = await this._createAudio(uri);
                 this._cacheCheck();
                 this._cache.set(key, {audio, uri, source});
                 return {audio, uri, source};
@@ -114,20 +114,23 @@ class AudioSystem {
         // NOP
     }
-    async _createAudio(uri, details) {
+    async _createAudio(uri) {
         const ttsParameters = this._getTextToSpeechParameters(uri);
         if (ttsParameters !== null) {
-            if (typeof details === 'object' && details !== null) {
-                if (details.tts === false) {
-                    throw new Error('Text-to-speech not permitted');
-                }
-            }
             return this.createTextToSpeechAudio(ttsParameters);
         }
         return await this._createAudioFromUrl(uri);
     }
+    _getAudioUri(definition, source, details) {
+        return (
+            this._audioUriBuilder !== null ?
+                this._audioUriBuilder.getUri(definition, source, details) :
+                null
+        );
+    }
     _createAudioFromUrl(url) {
         return new Promise((resolve, reject) => {
             const audio = new Audio(url);
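Note: AudioSystem no longer takes a getAudioUri callback; it accepts anything that exposes getUri(definition, source, details), or null to disable URI lookup (the null case is what the settings page now uses: _getAudioUri then yields null and the source is skipped). A minimal sketch, not part of the diff:

const audioSystem = new AudioSystem({
    audioUriBuilder: {
        // Any object with this method works; Display wires apiAudioGetUri in this way.
        async getUri(definition, source, details) {
            return null; // a real builder would resolve a URI here
        }
    }
});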


@@ -45,7 +45,13 @@ class Display {
         this.index = 0;
         this.audioPlaying = null;
         this.audioFallback = null;
-        this.audioSystem = new AudioSystem({getAudioUri: this._getAudioUri.bind(this)});
+        this.audioSystem = new AudioSystem({
+            audioUriBuilder: {
+                async getUri(definition, source, details) {
+                    return await apiAudioGetUri(definition, source, details);
+                }
+            }
+        });
         this.styleNode = null;
         this.eventListeners = new EventListenerCollection();
@@ -789,10 +795,10 @@
             this.audioPlaying = null;
         }
-        const sources = this.options.audio.sources;
         let audio, source, info;
         try {
-            ({audio, source} = await this.audioSystem.getDefinitionAudio(expression, sources));
+            const {sources, textToSpeechVoice, customSourceUrl} = this.options.audio;
+            ({audio, source} = await this.audioSystem.getDefinitionAudio(expression, sources, {textToSpeechVoice, customSourceUrl}));
             info = `From source ${1 + sources.indexOf(source)}: ${source}`;
         } catch (e) {
             if (this.audioFallback === null) {
@@ -947,9 +953,4 @@
             }
         };
     }
-    async _getAudioUri(definition, source) {
-        const optionsContext = this.getOptionsContext();
-        return await apiAudioGetUri(definition, source, optionsContext);
-    }
 }
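Note: a rough end-to-end sketch of the playback path after this change (not part of the diff), assuming audioSystem, expression, and a loaded options object already exist; it mirrors how Display now gathers the details from options.audio rather than from an optionsContext.

const {sources, textToSpeechVoice, customSourceUrl} = options.audio;
try {
    const {audio, source} = await audioSystem.getDefinitionAudio(
        expression,
        sources,
        {textToSpeechVoice, customSourceUrl}
    );
    audio.currentTime = 0;
    audio.play();
    console.log(`From source ${1 + sources.indexOf(source)}: ${source}`);
} catch (e) {
    // No source produced audio; a fallback clip could be played here.
}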