Merge pull request #401 from toasted-nutbread/audio-refactor

Audio refactor
toasted-nutbread 2020-03-10 19:20:34 -04:00 committed by GitHub
commit 36c55f0b17
6 changed files with 115 additions and 104 deletions
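
In short, the free functions in bg/js/audio.js (audioGetUrl, audioUrlNormalize, audioBuildFilename, audioInject) are replaced by an AudioUriBuilder class plus private helpers on Backend, and the 'audioGetUrl' API action is renamed to 'audioGetUri'. A minimal before/after sketch of a call site, assuming the renamed script is loaded; the example word and the null options argument are illustrative only and not part of the diff:

    async function getExampleAudioUri() {
        // `definition` mirrors the {expression, reading} shape used throughout the diff.
        const definition = {expression: '読む', reading: 'よむ'};

        // Before: free function keyed by source name (options came from the backend).
        //   return await audioGetUrl(definition, 'jpod101', options);

        // After: method on the new class (Backend owns a single instance).
        const audioUriBuilder = new AudioUriBuilder();
        return await audioUriBuilder.getUri(definition, 'jpod101', null); // the jpod101 handler ignores options
    }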


@@ -24,7 +24,7 @@
 <script src="/bg/js/anki.js"></script>
 <script src="/bg/js/anki-note-builder.js"></script>
 <script src="/bg/js/mecab.js"></script>
-<script src="/bg/js/audio.js"></script>
+<script src="/bg/js/audio-uri-builder.js"></script>
 <script src="/bg/js/backend-api-forwarder.js"></script>
 <script src="/bg/js/clipboard-monitor.js"></script>
 <script src="/bg/js/conditions.js"></script>


@@ -18,8 +18,49 @@
 /*global jpIsStringEntirelyKana*/

-const audioUrlBuilders = new Map([
-    ['jpod101', async (definition) => {
+class AudioUriBuilder {
+    constructor() {
+        this._getUrlHandlers = new Map([
+            ['jpod101', this._getUriJpod101.bind(this)],
+            ['jpod101-alternate', this._getUriJpod101Alternate.bind(this)],
+            ['jisho', this._getUriJisho.bind(this)],
+            ['text-to-speech', this._getUriTextToSpeech.bind(this)],
+            ['text-to-speech-reading', this._getUriTextToSpeechReading.bind(this)],
+            ['custom', this._getUriCustom.bind(this)]
+        ]);
+    }
+
+    normalizeUrl(url, baseUrl, basePath) {
+        if (url) {
+            if (url[0] === '/') {
+                if (url.length >= 2 && url[1] === '/') {
+                    // Begins with "//"
+                    url = baseUrl.substring(0, baseUrl.indexOf(':') + 1) + url;
+                } else {
+                    // Begins with "/"
+                    url = baseUrl + url;
+                }
+            } else if (!/^[a-z][a-z0-9\-+.]*:/i.test(url)) {
+                // No URI scheme => relative path
+                url = baseUrl + basePath + url;
+            }
+        }
+        return url;
+    }
+
+    async getUri(definition, source, options) {
+        const handler = this._getUrlHandlers.get(source);
+        if (typeof handler === 'function') {
+            try {
+                return await handler(definition, options);
+            } catch (e) {
+                // NOP
+            }
+        }
+        return null;
+    }
+
+    async _getUriJpod101(definition) {
         let kana = definition.reading;
         let kanji = definition.expression;
@@ -37,8 +78,9 @@ const audioUrlBuilders = new Map([
         }

         return `https://assets.languagepod101.com/dictionary/japanese/audiomp3.php?${params.join('&')}`;
-    }],
-    ['jpod101-alternate', async (definition) => {
+    }
+
+    async _getUriJpod101Alternate(definition) {
         const response = await new Promise((resolve, reject) => {
             const xhr = new XMLHttpRequest();
             xhr.open('POST', 'https://www.japanesepod101.com/learningcenter/reference/dictionary_post');
@@ -54,7 +96,7 @@ const audioUrlBuilders = new Map([
                 const url = row.querySelector('audio>source[src]').getAttribute('src');
                 const reading = row.getElementsByClassName('dc-vocab_kana').item(0).textContent;
                 if (url && reading && (!definition.reading || definition.reading === reading)) {
-                    return audioUrlNormalize(url, 'https://www.japanesepod101.com', '/learningcenter/reference/');
+                    return this.normalizeUrl(url, 'https://www.japanesepod101.com', '/learningcenter/reference/');
                 }
             } catch (e) {
                 // NOP
@@ -62,8 +104,9 @@ const audioUrlBuilders = new Map([
         }

         throw new Error('Failed to find audio URL');
-    }],
-    ['jisho', async (definition) => {
+    }
+
+    async _getUriJisho(definition) {
         const response = await new Promise((resolve, reject) => {
             const xhr = new XMLHttpRequest();
             xhr.open('GET', `https://jisho.org/search/${definition.expression}`);
@@ -78,7 +121,7 @@ const audioUrlBuilders = new Map([
             if (audio !== null) {
                 const url = audio.getElementsByTagName('source').item(0).getAttribute('src');
                 if (url) {
-                    return audioUrlNormalize(url, 'https://jisho.org', '/search/');
+                    return this.normalizeUrl(url, 'https://jisho.org', '/search/');
                 }
             }
         } catch (e) {
@@ -86,99 +129,28 @@ const audioUrlBuilders = new Map([
         }

         throw new Error('Failed to find audio URL');
-    }],
-    ['text-to-speech', async (definition, options) => {
+    }
+
+    async _getUriTextToSpeech(definition, options) {
         const voiceURI = options.audio.textToSpeechVoice;
         if (!voiceURI) {
             throw new Error('No voice');
         }

         return `tts:?text=${encodeURIComponent(definition.expression)}&voice=${encodeURIComponent(voiceURI)}`;
-    }],
-    ['text-to-speech-reading', async (definition, options) => {
+    }
+
+    async _getUriTextToSpeechReading(definition, options) {
         const voiceURI = options.audio.textToSpeechVoice;
         if (!voiceURI) {
             throw new Error('No voice');
         }

         return `tts:?text=${encodeURIComponent(definition.reading || definition.expression)}&voice=${encodeURIComponent(voiceURI)}`;
-    }],
-    ['custom', async (definition, options) => {
+    }
+
+    async _getUriCustom(definition, options) {
         const customSourceUrl = options.audio.customSourceUrl;
         return customSourceUrl.replace(/\{([^}]*)\}/g, (m0, m1) => (hasOwn(definition, m1) ? `${definition[m1]}` : m0));
-    }]
-]);
-
-async function audioGetUrl(definition, mode, options, download) {
-    const handler = audioUrlBuilders.get(mode);
-    if (typeof handler === 'function') {
-        try {
-            return await handler(definition, options, download);
-        } catch (e) {
-            // NOP
-        }
-    }
-    return null;
-}
-
-function audioUrlNormalize(url, baseUrl, basePath) {
-    if (url) {
-        if (url[0] === '/') {
-            if (url.length >= 2 && url[1] === '/') {
-                // Begins with "//"
-                url = baseUrl.substring(0, baseUrl.indexOf(':') + 1) + url;
-            } else {
-                // Begins with "/"
-                url = baseUrl + url;
-            }
-        } else if (!/^[a-z][a-z0-9\-+.]*:/i.test(url)) {
-            // No URI scheme => relative path
-            url = baseUrl + basePath + url;
-        }
-    }
-    return url;
-}
-
-function audioBuildFilename(definition) {
-    if (definition.reading || definition.expression) {
-        let filename = 'yomichan';
-        if (definition.reading) {
-            filename += `_${definition.reading}`;
-        }
-        if (definition.expression) {
-            filename += `_${definition.expression}`;
-        }
-        return filename += '.mp3';
-    }
-    return null;
-}
-
-async function audioInject(definition, fields, sources, optionsContext, audioSystem) {
-    let usesAudio = false;
-    for (const fieldValue of Object.values(fields)) {
-        if (fieldValue.includes('{audio}')) {
-            usesAudio = true;
-            break;
-        }
-    }
-
-    if (!usesAudio) {
-        return true;
-    }
-
-    try {
-        const expressions = definition.expressions;
-        const audioSourceDefinition = Array.isArray(expressions) ? expressions[0] : definition;
-
-        const {uri} = await audioSystem.getDefinitionAudio(audioSourceDefinition, sources, {tts: false, optionsContext});
-        const filename = audioBuildFilename(audioSourceDefinition);
-        if (filename !== null) {
-            definition.audio = {url: uri, filename};
-        }
-
-        return true;
-    } catch (e) {
-        return false;
-    }
-}
+    }
+}
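
For reference, normalizeUrl (formerly the free function audioUrlNormalize, deleted above) resolves the URLs scraped from JapanesePod101 and Jisho pages against the page they came from. A short sketch of its branches, using made-up host and file names:

    const builder = new AudioUriBuilder();
    const base = 'https://www.japanesepod101.com';
    const path = '/learningcenter/reference/';

    // Protocol-relative "//host/..." inherits only the scheme of the base URL.
    builder.normalizeUrl('//media.example.com/a.mp3', base, path);
    // -> 'https://media.example.com/a.mp3'

    // Root-relative "/..." is joined to the base origin.
    builder.normalizeUrl('/audio/a.mp3', base, path);
    // -> 'https://www.japanesepod101.com/audio/a.mp3'

    // A bare relative path is joined to base + basePath.
    builder.normalizeUrl('a.mp3', base, path);
    // -> 'https://www.japanesepod101.com/learningcenter/reference/a.mp3'

    // Anything that already carries a scheme is returned unchanged.
    builder.normalizeUrl('tts:?text=a', base, path);
    // -> 'tts:?text=a'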


@@ -21,9 +21,8 @@ conditionsTestValue, profileConditionsDescriptor
 handlebarsRenderDynamic
 requestText, requestJson, optionsLoad
 dictConfigured, dictTermsSort, dictEnabledSet
-audioGetUrl, audioInject
 jpConvertReading, jpDistributeFuriganaInflected, jpKatakanaToHiragana
-AnkiNoteBuilder, AudioSystem, Translator, AnkiConnect, AnkiNull, Mecab, BackendApiForwarder, JsonSchema, ClipboardMonitor*/
+AnkiNoteBuilder, AudioSystem, AudioUriBuilder, Translator, AnkiConnect, AnkiNull, Mecab, BackendApiForwarder, JsonSchema, ClipboardMonitor*/

 class Backend {
     constructor() {
@@ -36,6 +35,7 @@ class Backend {
         this.optionsSchema = null;
         this.defaultAnkiFieldTemplates = null;
         this.audioSystem = new AudioSystem({getAudioUri: this._getAudioUri.bind(this)});
+        this.audioUriBuilder = new AudioUriBuilder();
         this.optionsContext = {
             depth: 0,
             url: window.location.href
@@ -67,7 +67,7 @@ class Backend {
             ['noteView', this._onApiNoteView.bind(this)],
             ['templateRender', this._onApiTemplateRender.bind(this)],
             ['commandExec', this._onApiCommandExec.bind(this)],
-            ['audioGetUrl', this._onApiAudioGetUrl.bind(this)],
+            ['audioGetUri', this._onApiAudioGetUri.bind(this)],
             ['screenshotGet', this._onApiScreenshotGet.bind(this)],
             ['forward', this._onApiForward.bind(this)],
             ['frameInformationGet', this._onApiFrameInformationGet.bind(this)],
@@ -434,12 +434,11 @@
         const templates = this.defaultAnkiFieldTemplates;

         if (mode !== 'kanji') {
-            await audioInject(
+            await this._audioInject(
                 definition,
                 options.anki.terms.fields,
                 options.audio.sources,
-                optionsContext,
-                this.audioSystem
+                optionsContext
             );
         }
@@ -514,9 +513,9 @@
         return this._runCommand(command, params);
     }

-    async _onApiAudioGetUrl({definition, source, optionsContext}) {
+    async _onApiAudioGetUri({definition, source, optionsContext}) {
         const options = this.getOptions(optionsContext);
-        return await audioGetUrl(definition, source, options);
+        return await this.audioUriBuilder.getUri(definition, source, options);
     }

     _onApiScreenshotGet({options}, sender) {
@@ -772,7 +771,36 @@
         }

         const options = this.getOptions(optionsContext);
-        return await audioGetUrl(definition, source, options);
+        return await this.audioUriBuilder.getUri(definition, source, options);
+    }
+
+    async _audioInject(definition, fields, sources, optionsContext) {
+        let usesAudio = false;
+        for (const fieldValue of Object.values(fields)) {
+            if (fieldValue.includes('{audio}')) {
+                usesAudio = true;
+                break;
+            }
+        }
+
+        if (!usesAudio) {
+            return true;
+        }
+
+        try {
+            const expressions = definition.expressions;
+            const audioSourceDefinition = Array.isArray(expressions) ? expressions[0] : definition;
+
+            const {uri} = await this.audioSystem.getDefinitionAudio(audioSourceDefinition, sources, {tts: false, optionsContext});
+            const filename = this._createInjectedAudioFileName(audioSourceDefinition);
+            if (filename !== null) {
+                definition.audio = {url: uri, filename};
+            }
+
+            return true;
+        } catch (e) {
+            return false;
+        }
     }

     async _injectScreenshot(definition, fields, screenshot) {
@@ -815,6 +843,17 @@
         return handlebarsRenderDynamic(template, data);
     }

+    _createInjectedAudioFileName(definition) {
+        const {reading, expression} = definition;
+        if (!reading && !expression) { return null; }
+
+        let filename = 'yomichan';
+        if (reading) { filename += `_${reading}`; }
+        if (expression) { filename += `_${expression}`; }
+        filename += '.mp3';
+        return filename;
+    }
+
     static _getTabUrl(tab) {
         return new Promise((resolve) => {
             chrome.tabs.sendMessage(tab.id, {action: 'getUrl'}, {frameId: 0}, (response) => {
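
The filename logic from the old audioBuildFilename carries over unchanged into _createInjectedAudioFileName. A short sketch of the names it produces for the injected Anki audio field; the example entries are illustrative and `backend` stands for a Backend instance:

    backend._createInjectedAudioFileName({expression: '読む', reading: 'よむ'});
    // -> 'yomichan_よむ_読む.mp3'

    backend._createInjectedAudioFileName({expression: '読む', reading: ''});
    // -> 'yomichan_読む.mp3'

    backend._createInjectedAudioFileName({expression: '', reading: ''});
    // -> null; _audioInject then skips setting definition.audio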


@@ -16,7 +16,7 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */

-/*global getOptionsContext, getOptionsMutable, settingsSaveOptions, apiAudioGetUrl
+/*global getOptionsContext, getOptionsMutable, settingsSaveOptions, apiAudioGetUri
 AudioSystem, AudioSourceUI*/

 let audioSourceUI = null;
@@ -26,7 +26,7 @@ async function audioSettingsInitialize() {
     audioSystem = new AudioSystem({
         getAudioUri: async (definition, source) => {
             const optionsContext = getOptionsContext();
-            return await apiAudioGetUrl(definition, source, optionsContext);
+            return await apiAudioGetUri(definition, source, optionsContext);
         }
     });


@@ -69,8 +69,8 @@ function apiTemplateRender(template, data) {
     return _apiInvoke('templateRender', {data, template});
 }

-function apiAudioGetUrl(definition, source, optionsContext) {
-    return _apiInvoke('audioGetUrl', {definition, source, optionsContext});
+function apiAudioGetUri(definition, source, optionsContext) {
+    return _apiInvoke('audioGetUri', {definition, source, optionsContext});
 }

 function apiCommandExec(command, params) {
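
On the front end the rename is mechanical: callers switch from apiAudioGetUrl to apiAudioGetUri, which sends the 'audioGetUri' action to Backend._onApiAudioGetUri and from there to AudioUriBuilder.getUri. A hedged usage sketch; the helper name is hypothetical, and while real callers obtain optionsContext from getOptionsContext(), the literal shape below is copied from Backend's default:

    async function playDefinitionAudio(definition) {
        const optionsContext = {depth: 0, url: window.location.href};
        const uri = await apiAudioGetUri(definition, 'jpod101', optionsContext);
        if (uri !== null) {
            // getUri resolves to null when the handler for the source fails;
            // the real callers hand the URI to AudioSystem instead of playing it directly.
            new Audio(uri).play();
        }
    }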


@@ -18,7 +18,7 @@
 /*global docRangeFromPoint, docSentenceExtract
 apiKanjiFind, apiTermsFind, apiNoteView, apiOptionsGet, apiDefinitionsAddable, apiDefinitionAdd
-apiScreenshotGet, apiForward, apiAudioGetUrl
+apiScreenshotGet, apiForward, apiAudioGetUri
 AudioSystem, DisplayGenerator, WindowScroll, DisplayContext, DOM*/

 class Display {
@@ -919,6 +919,6 @@ class Display {
     async _getAudioUri(definition, source) {
         const optionsContext = this.getOptionsContext();
-        return await apiAudioGetUrl(definition, source, optionsContext);
+        return await apiAudioGetUri(definition, source, optionsContext);
     }
 }
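
Taken together, both UI surfaces now reach audio through a single path, roughly (a summary of this diff, not code from it):

    // Display / settings page:
    //   AudioSystem --(getAudioUri callback)--> apiAudioGetUri(definition, source, optionsContext)
    //     --_apiInvoke('audioGetUri', ...)--> background page
    // Background page:
    //   Backend._onApiAudioGetUri --> this.audioUriBuilder.getUri(definition, source, options)
    //     --> per-source handler (_getUriJpod101, _getUriJisho, _getUriTextToSpeech, ...)
    //     --> URI string, or null if the handler failed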