Audio system refactor (#858)

* Refactor AudioUriBuilder

* Add downloadAudio function

* Refactor AudioSystem

* Update API usage

* Rename file

* Update scripts

* Add prepare calls

toasted-nutbread 2020-09-26 13:41:26 -04:00 committed by GitHub
parent 22932e02cb
commit 0b51488f1f
9 changed files with 190 additions and 209 deletions


@@ -22,13 +22,12 @@
<script src="/mixed/js/environment.js"></script>
<script src="/mixed/js/japanese.js"></script>
<script src="/mixed/js/audio-system.js"></script>
<script src="/mixed/js/cache-map.js"></script>
<script src="/mixed/js/dictionary-data-util.js"></script>
<script src="/mixed/js/object-property-accessor.js"></script>
<script src="/bg/js/anki.js"></script>
<script src="/bg/js/audio-uri-builder.js"></script>
<script src="/bg/js/audio-downloader.js"></script>
<script src="/bg/js/clipboard-monitor.js"></script>
<script src="/bg/js/clipboard-reader.js"></script>
<script src="/bg/js/database.js"></script>


@@ -20,20 +20,54 @@
* jp
*/
class AudioUriBuilder {
class AudioDownloader {
constructor({requestBuilder}) {
this._requestBuilder = requestBuilder;
this._getUrlHandlers = new Map([
['jpod101', this._getUriJpod101.bind(this)],
['jpod101-alternate', this._getUriJpod101Alternate.bind(this)],
['jisho', this._getUriJisho.bind(this)],
['text-to-speech', this._getUriTextToSpeech.bind(this)],
['text-to-speech-reading', this._getUriTextToSpeechReading.bind(this)],
['custom', this._getUriCustom.bind(this)]
this._getInfoHandlers = new Map([
['jpod101', this._getInfoJpod101.bind(this)],
['jpod101-alternate', this._getInfoJpod101Alternate.bind(this)],
['jisho', this._getInfoJisho.bind(this)],
['text-to-speech', this._getInfoTextToSpeech.bind(this)],
['text-to-speech-reading', this._getInfoTextToSpeechReading.bind(this)],
['custom', this._getInfoCustom.bind(this)]
]);
}
normalizeUrl(url, baseUrl, basePath) {
async getInfo(source, expression, reading, details) {
const handler = this._getInfoHandlers.get(source);
if (typeof handler === 'function') {
try {
return await handler(expression, reading, details);
} catch (e) {
// NOP
}
}
return null;
}
async downloadAudio(sources, expression, reading, details) {
for (const source of sources) {
const info = await this.getInfo(source, expression, reading, details);
if (info === null) { continue; }
switch (info.type) {
case 'url':
try {
const {details: {url}} = info;
return await this._downloadAudioFromUrl(url);
} catch (e) {
// NOP
}
break;
}
}
throw new Error('Could not download audio');
}
// Private
_normalizeUrl(url, baseUrl, basePath) {
if (url) {
if (url[0] === '/') {
if (url.length >= 2 && url[1] === '/') {
@@ -51,19 +85,7 @@ class AudioUriBuilder {
return url;
}
async getUri(source, expression, reading, details) {
const handler = this._getUrlHandlers.get(source);
if (typeof handler === 'function') {
try {
return await handler(expression, reading, details);
} catch (e) {
// NOP
}
}
return null;
}
async _getUriJpod101(expression, reading) {
async _getInfoJpod101(expression, reading) {
let kana = reading;
let kanji = expression;
@@ -80,10 +102,11 @@ class AudioUriBuilder {
params.push(`kana=${encodeURIComponent(kana)}`);
}
return `https://assets.languagepod101.com/dictionary/japanese/audiomp3.php?${params.join('&')}`;
const url = `https://assets.languagepod101.com/dictionary/japanese/audiomp3.php?${params.join('&')}`;
return {type: 'url', details: {url}};
}
async _getUriJpod101Alternate(expression, reading) {
async _getInfoJpod101Alternate(expression, reading) {
const fetchUrl = 'https://www.japanesepod101.com/learningcenter/reference/dictionary_post';
const data = `post=dictionary_reference&match_type=exact&search_query=${encodeURIComponent(expression)}&vulgar=true`;
const response = await this._requestBuilder.fetchAnonymous(fetchUrl, {
@@ -109,7 +132,7 @@ class AudioUriBuilder {
const source = dom.getElementByTagName('source', audio);
if (source === null) { continue; }
const url = dom.getAttribute(source, 'src');
let url = dom.getAttribute(source, 'src');
if (url === null) { continue; }
const htmlReadings = dom.getElementsByClassName('dc-vocab_kana');
@@ -117,7 +140,8 @@ class AudioUriBuilder {
const htmlReading = dom.getTextContent(htmlReadings[0]);
if (htmlReading && (!reading || reading === htmlReading)) {
return this.normalizeUrl(url, 'https://www.japanesepod101.com', '/learningcenter/reference/');
url = this._normalizeUrl(url, 'https://www.japanesepod101.com', '/learningcenter/reference/');
return {type: 'url', details: {url}};
}
} catch (e) {
// NOP
@@ -127,7 +151,7 @@ class AudioUriBuilder {
throw new Error('Failed to find audio URL');
}
async _getUriJisho(expression, reading) {
async _getInfoJisho(expression, reading) {
const fetchUrl = `https://jisho.org/search/${expression}`;
const response = await this._requestBuilder.fetchAnonymous(fetchUrl, {
method: 'GET',
@@ -145,9 +169,10 @@ class AudioUriBuilder {
if (audio !== null) {
const source = dom.getElementByTagName('source', audio);
if (source !== null) {
const url = dom.getAttribute(source, 'src');
let url = dom.getAttribute(source, 'src');
if (url !== null) {
return this.normalizeUrl(url, 'https://jisho.org', '/search/');
url = this._normalizeUrl(url, 'https://jisho.org', '/search/');
return {type: 'url', details: {url}};
}
}
}
@@ -158,25 +183,72 @@ class AudioUriBuilder {
throw new Error('Failed to find audio URL');
}
async _getUriTextToSpeech(expression, reading, {textToSpeechVoice}) {
async _getInfoTextToSpeech(expression, reading, {textToSpeechVoice}) {
if (!textToSpeechVoice) {
throw new Error('No voice');
}
return `tts:?text=${encodeURIComponent(expression)}&voice=${encodeURIComponent(textToSpeechVoice)}`;
return {type: 'tts', details: {text: expression, voice: textToSpeechVoice}};
}
async _getUriTextToSpeechReading(expression, reading, {textToSpeechVoice}) {
async _getInfoTextToSpeechReading(expression, reading, {textToSpeechVoice}) {
if (!textToSpeechVoice) {
throw new Error('No voice');
}
return `tts:?text=${encodeURIComponent(reading || expression)}&voice=${encodeURIComponent(textToSpeechVoice)}`;
return {type: 'tts', details: {text: reading || expression, voice: textToSpeechVoice}};
}
async _getUriCustom(expression, reading, {customSourceUrl}) {
async _getInfoCustom(expression, reading, {customSourceUrl}) {
if (typeof customSourceUrl !== 'string') {
throw new Error('No custom URL defined');
}
const data = {expression, reading};
return customSourceUrl.replace(/\{([^}]*)\}/g, (m0, m1) => (hasOwn(data, m1) ? `${data[m1]}` : m0));
const url = customSourceUrl.replace(/\{([^}]*)\}/g, (m0, m1) => (hasOwn(data, m1) ? `${data[m1]}` : m0));
return {type: 'url', details: {url}};
}
async _downloadAudioFromUrl(url) {
const response = await this._requestBuilder.fetchAnonymous(url, {
method: 'GET',
mode: 'cors',
cache: 'default',
credentials: 'omit',
redirect: 'follow',
referrerPolicy: 'no-referrer'
});
if (!response.ok) {
throw new Error(`Invalid response: ${response.status}`);
}
const arrayBuffer = await response.arrayBuffer();
if (!await this._isAudioBinaryValid(arrayBuffer)) {
throw new Error('Could not retrieve audio');
}
return this._arrayBufferToBase64(arrayBuffer);
}
async _isAudioBinaryValid(arrayBuffer) {
const digest = await this._arrayBufferDigest(arrayBuffer);
switch (digest) {
case 'ae6398b5a27bc8c0a771df6c907ade794be15518174773c58c7c7ddd17098906': // jpod101 invalid audio
return false;
default:
return true;
}
}
async _arrayBufferDigest(arrayBuffer) {
const hash = new Uint8Array(await crypto.subtle.digest('SHA-256', new Uint8Array(arrayBuffer)));
let digest = '';
for (const byte of hash) {
digest += byte.toString(16).padStart(2, '0');
}
return digest;
}
_arrayBufferToBase64(arrayBuffer) {
return btoa(String.fromCharCode(...new Uint8Array(arrayBuffer)));
}
}
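
For orientation, a minimal usage sketch of the new AudioDownloader follows; it is not part of the commit, the RequestBuilder instance is assumed to be available, and the term values are placeholders.

// Hypothetical usage sketch (assumes a prepared RequestBuilder; term values are placeholders).
async function fetchExampleAudio(requestBuilder) {
    const downloader = new AudioDownloader({requestBuilder});
    // Walks the source list in order and resolves to base64-encoded audio data,
    // or throws 'Could not download audio' when every source fails.
    return await downloader.downloadAudio(
        ['jpod101', 'jpod101-alternate', 'jisho'],
        '読む',                                     // placeholder expression
        'よむ',                                     // placeholder reading
        {textToSpeechVoice: null, customSourceUrl: null}
    );
}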


@@ -17,8 +17,7 @@
/* global
* AnkiConnect
* AudioSystem
* AudioUriBuilder
* AudioDownloader
* ClipboardMonitor
* ClipboardReader
* DictionaryDatabase
@@ -54,14 +53,9 @@ class Backend {
this._profileConditionsUtil = new ProfileConditions();
this._defaultAnkiFieldTemplates = null;
this._requestBuilder = new RequestBuilder();
this._audioUriBuilder = new AudioUriBuilder({
this._audioDownloader = new AudioDownloader({
requestBuilder: this._requestBuilder
});
this._audioSystem = new AudioSystem({
audioUriBuilder: this._audioUriBuilder,
requestBuilder: this._requestBuilder,
useCache: false
});
this._optionsUtil = new OptionsUtil();
this._searchPopupTabId = null;
@@ -91,7 +85,8 @@ class Backend {
['injectAnkiNoteMedia', {async: true, contentScript: true, handler: this._onApiInjectAnkiNoteMedia.bind(this)}],
['noteView', {async: true, contentScript: true, handler: this._onApiNoteView.bind(this)}],
['commandExec', {async: false, contentScript: true, handler: this._onApiCommandExec.bind(this)}],
['audioGetUri', {async: true, contentScript: true, handler: this._onApiAudioGetUri.bind(this)}],
['getDefinitionAudioInfo', {async: true, contentScript: true, handler: this._onApiGetDefinitionAudioInfo.bind(this)}],
['downloadDefinitionAudio', {async: true, contentScript: true, handler: this._onApiDownloadDefinitionAudio.bind(this)}],
['screenshotGet', {async: true, contentScript: true, handler: this._onApiScreenshotGet.bind(this)}],
['sendMessageToFrame', {async: false, contentScript: true, handler: this._onApiSendMessageToFrame.bind(this)}],
['broadcastTab', {async: false, contentScript: true, handler: this._onApiBroadcastTab.bind(this)}],
@@ -117,7 +112,6 @@ class Backend {
['setAllSettings', {async: true, contentScript: false, handler: this._onApiSetAllSettings.bind(this)}],
['getOrCreateSearchPopup', {async: true, contentScript: true, handler: this._onApiGetOrCreateSearchPopup.bind(this)}],
['isTabSearchPopup', {async: true, contentScript: true, handler: this._onApiIsTabSearchPopup.bind(this)}],
['getDefinitionAudio', {async: true, contentScript: true, handler: this._onApiGetDefinitionAudio.bind(this)}],
['triggerDatabaseUpdated', {async: false, contentScript: true, handler: this._onApiTriggerDatabaseUpdated.bind(this)}]
]);
this._messageHandlersWithProgress = new Map([
@@ -479,8 +473,12 @@ class Backend {
return this._runCommand(command, params);
}
async _onApiAudioGetUri({source, expression, reading, details}) {
return await this._audioUriBuilder.getUri(source, expression, reading, details);
async _onApiGetDefinitionAudioInfo({source, expression, reading, details}) {
return await this._audioDownloader.getInfo(source, expression, reading, details);
}
async _onApiDownloadDefinitionAudio({sources, expression, reading, details}) {
return await this._downloadDefinitionAudio(sources, expression, reading, details);
}
_onApiScreenshotGet({options}, sender) {
@@ -728,10 +726,6 @@ class Backend {
return (tab !== null);
}
async _onApiGetDefinitionAudio({sources, expression, reading, details}) {
return this._getDefinitionAudio(sources, expression, reading, details);
}
_onApiTriggerDatabaseUpdated({type, cause}) {
this._triggerDatabaseUpdated(type, cause);
}
@@ -1511,8 +1505,8 @@ class Backend {
}
}
async _getDefinitionAudio(sources, expression, reading, details) {
return await this._audioSystem.getDefinitionAudio(sources, expression, reading, details);
async _downloadDefinitionAudio(sources, expression, reading, details) {
return await this._audioDownloader.downloadAudio(sources, expression, reading, details);
}
async _injectAnkNoteMedia(ankiConnect, expression, reading, timestamp, audioDetails, screenshotDetails, clipboardImage) {
@@ -1548,7 +1542,7 @@ class Backend {
fileName = this._replaceInvalidFileNameCharacters(fileName);
const {sources, customSourceUrl} = details;
const {audio: data} = await this._getDefinitionAudio(
const data = await this._downloadDefinitionAudio(
sources,
expression,
reading,


@@ -22,17 +22,16 @@
class AudioController {
constructor(settingsController) {
this._settingsController = settingsController;
this._audioSystem = null;
this._audioSystem = new AudioSystem({
cacheSize: 0
});
this._audioSourceContainer = null;
this._audioSourceAddButton = null;
this._audioSourceEntries = [];
}
async prepare() {
this._audioSystem = new AudioSystem({
audioUriBuilder: null,
useCache: true
});
this._audioSystem.prepare();
this._audioSourceContainer = document.querySelector('.audio-source-list');
this._audioSourceAddButton = document.querySelector('.audio-source-add');


@@ -75,6 +75,7 @@
<script src="/mixed/js/api.js"></script>
<script src="/mixed/js/japanese.js"></script>
<script src="/mixed/js/cache-map.js"></script>
<script src="/mixed/js/document-util.js"></script>
<script src="/fg/js/dom-text-scanner.js"></script>
<script src="/fg/js/source.js"></script>


@@ -50,6 +50,7 @@
<script src="/mixed/js/api.js"></script>
<script src="/mixed/js/japanese.js"></script>
<script src="/mixed/js/cache-map.js"></script>
<script src="/mixed/js/document-util.js"></script>
<script src="/fg/js/dom-text-scanner.js"></script>
<script src="/fg/js/source.js"></script>


@@ -85,8 +85,12 @@ const api = (() => {
return this._invoke('noteView', {noteId});
}
audioGetUri(source, expression, reading, details) {
return this._invoke('audioGetUri', {source, expression, reading, details});
getDefinitionAudioInfo(source, expression, reading, details) {
return this._invoke('getDefinitionAudioInfo', {source, expression, reading, details});
}
downloadDefinitionAudio(sources, expression, reading, details) {
return this._invoke('downloadDefinitionAudio', {sources, expression, reading, details});
}
commandExec(command, params) {
@@ -189,10 +193,6 @@ const api = (() => {
return this._invoke('isTabSearchPopup', {tabId});
}
getDefinitionAudio(sources, expression, reading, details) {
return this._invoke('getDefinitionAudio', {sources, expression, reading, details});
}
triggerDatabaseUpdated(type, cause) {
return this._invoke('triggerDatabaseUpdated', {type, cause});
}
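
A caller-side sketch of the renamed wrappers, with a placeholder term and an empty details object; this is illustrative only, not part of the commit.

// Hypothetical caller-side sketch.
async function previewDefinitionAudio() {
    // Ask the backend what a single source resolves to ({type: 'url'|'tts', details}):
    const info = await api.getDefinitionAudioInfo('jpod101', '猫', 'ねこ', {});
    // ...or have the backend walk a source list and return base64 audio data directly:
    const data = await api.downloadDefinitionAudio(['jpod101', 'jisho'], '猫', 'ねこ', {});
    return {info, data};
}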


@@ -16,101 +16,72 @@
*/
/* global
* CacheMap
* TextToSpeechAudio
*/
class AudioSystem {
constructor({audioUriBuilder, requestBuilder=null, useCache}) {
this._cache = useCache ? new Map() : null;
this._cacheSizeMaximum = 32;
this._audioUriBuilder = audioUriBuilder;
this._requestBuilder = requestBuilder;
constructor({getAudioInfo, cacheSize=32}) {
this._cache = new CacheMap(cacheSize);
this._getAudioInfo = getAudioInfo;
}
if (typeof speechSynthesis !== 'undefined') {
prepare() {
// speechSynthesis.getVoices() will not be populated unless some API call is made.
speechSynthesis.addEventListener('voiceschanged', this._onVoicesChanged.bind(this));
}
if (typeof speechSynthesis === 'undefined') { return; }
const eventListeners = new EventListenerCollection();
const onVoicesChanged = () => { eventListeners.removeAllEventListeners(); };
eventListeners.addEventListener(speechSynthesis, 'voiceschanged', onVoicesChanged, false);
}
async getDefinitionAudio(sources, expression, reading, details) {
const key = `${expression}:${reading}`;
const hasCache = (this._cache !== null && !details.disableCache);
async createDefinitionAudio(sources, expression, reading, details) {
const key = [expression, reading];
if (hasCache) {
const cacheValue = this._cache.get(key);
if (typeof cacheValue !== 'undefined') {
const {audio, uri, source} = cacheValue;
const {audio, source} = cacheValue;
const index = sources.indexOf(source);
if (index >= 0) {
return {audio, uri, index};
}
return {audio, index};
}
}
for (let i = 0, ii = sources.length; i < ii; ++i) {
const source = sources[i];
const uri = await this._getAudioUri(source, expression, reading, details);
if (uri === null) { continue; }
const info = await this._getAudioInfo(source, expression, reading, details);
if (info === null) { continue; }
let audio;
try {
const audio = (
details.binary ?
await this._createAudioBinary(uri) :
await this._createAudio(uri)
);
if (hasCache) {
this._cacheCheck();
this._cache.set(key, {audio, uri, source});
switch (info.type) {
case 'url':
{
const {details: {url}} = info;
audio = await this.createAudio(url);
}
break;
case 'tts':
{
const {details: {text, voice}} = info;
audio = this.createTextToSpeechAudio(text, voice);
}
break;
default:
throw new Error(`Unsupported type: ${info.type}`);
}
return {audio, uri, index: i};
} catch (e) {
// NOP
continue;
}
this._cache.set(key, {audio, source});
return {audio, index: i};
}
throw new Error('Could not create audio');
}
createTextToSpeechAudio(text, voiceUri) {
const voice = this._getTextToSpeechVoiceFromVoiceUri(voiceUri);
if (voice === null) {
throw new Error('Invalid text-to-speech voice');
}
return new TextToSpeechAudio(text, voice);
}
_onVoicesChanged() {
// NOP
}
_getAudioUri(source, expression, reading, details) {
return (
this._audioUriBuilder !== null ?
this._audioUriBuilder.getUri(source, expression, reading, details) :
null
);
}
async _createAudio(uri) {
const ttsParameters = this._getTextToSpeechParameters(uri);
if (ttsParameters !== null) {
const {text, voiceUri} = ttsParameters;
return this.createTextToSpeechAudio(text, voiceUri);
}
return await this._createAudioFromUrl(uri);
}
async _createAudioBinary(uri) {
const ttsParameters = this._getTextToSpeechParameters(uri);
if (ttsParameters !== null) {
throw new Error('Cannot create audio from text-to-speech');
}
return await this._createAudioBinaryFromUrl(uri);
}
_createAudioFromUrl(url) {
createAudio(url) {
return new Promise((resolve, reject) => {
const audio = new Audio(url);
audio.addEventListener('loadeddata', () => {
@@ -124,27 +95,15 @@ class AudioSystem {
});
}
async _createAudioBinaryFromUrl(url) {
const response = await this._requestBuilder.fetchAnonymous(url, {
method: 'GET',
mode: 'cors',
cache: 'default',
credentials: 'omit',
redirect: 'follow',
referrerPolicy: 'no-referrer'
});
const arrayBuffer = await response.arrayBuffer();
if (!await this._isAudioBinaryValid(arrayBuffer)) {
throw new Error('Could not retrieve audio');
createTextToSpeechAudio(text, voiceUri) {
const voice = this._getTextToSpeechVoiceFromVoiceUri(voiceUri);
if (voice === null) {
throw new Error('Invalid text-to-speech voice');
}
return new TextToSpeechAudio(text, voice);
}
return this._arrayBufferToBase64(arrayBuffer);
}
_arrayBufferToBase64(arrayBuffer) {
return btoa(String.fromCharCode(...new Uint8Array(arrayBuffer)));
}
// Private
_isAudioValid(audio) {
const duration = audio.duration;
@@ -154,16 +113,6 @@ class AudioSystem {
);
}
async _isAudioBinaryValid(arrayBuffer) {
const digest = await AudioSystem.arrayBufferDigest(arrayBuffer);
switch (digest) {
case 'ae6398b5a27bc8c0a771df6c907ade794be15518174773c58c7c7ddd17098906': // jpod101 invalid audio
return false;
default:
return true;
}
}
_getTextToSpeechVoiceFromVoiceUri(voiceUri) {
try {
for (const voice of speechSynthesis.getVoices()) {
@@ -176,38 +125,4 @@ class AudioSystem {
}
return null;
}
_getTextToSpeechParameters(uri) {
const m = /^tts:[^#?]*\?([^#]*)/.exec(uri);
if (m === null) { return null; }
const searchParameters = new URLSearchParams(m[1]);
const text = searchParameters.get('text');
const voiceUri = searchParameters.get('voice');
return (text !== null && voiceUri !== null ? {text, voiceUri} : null);
}
_cacheCheck() {
const removeCount = this._cache.size - this._cacheSizeMaximum;
if (removeCount <= 0) { return; }
const removeKeys = [];
for (const key of this._cache.keys()) {
removeKeys.push(key);
if (removeKeys.length >= removeCount) { break; }
}
for (const key of removeKeys) {
this._cache.delete(key);
}
}
static async arrayBufferDigest(arrayBuffer) {
const hash = new Uint8Array(await crypto.subtle.digest('SHA-256', new Uint8Array(arrayBuffer)));
let digest = '';
for (const byte of hash) {
digest += byte.toString(16).padStart(2, '0');
}
return digest;
}
}
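
A minimal wiring sketch, assuming the api wrapper shown earlier, of how the refactored AudioSystem consumes an info provider instead of a URI builder; the source list and details values are placeholders, not part of the commit.

// Hypothetical wiring sketch (mirrors the Display changes below).
const audioSystem = new AudioSystem({
    getAudioInfo: (source, expression, reading, details) => api.getDefinitionAudioInfo(source, expression, reading, details),
    cacheSize: 32
});
audioSystem.prepare(); // registers the speechSynthesis voiceschanged listener when TTS is available

async function playExampleAudio() {
    const {audio, index} = await audioSystem.createDefinitionAudio(
        ['jpod101', 'text-to-speech'],
        '犬',                                       // placeholder expression
        'いぬ',                                     // placeholder reading
        {textToSpeechVoice: '', customSourceUrl: ''}
    );
    audio.play();
    return index; // which entry in the source list produced the audio
}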


@@ -43,12 +43,7 @@ class Display extends EventDispatcher {
this._audioPlaying = null;
this._audioFallback = null;
this._audioSystem = new AudioSystem({
audioUriBuilder: {
getUri: async (source, expression, reading, details) => {
return await api.audioGetUri(source, expression, reading, details);
}
},
useCache: true
getAudioInfo: this._getAudioInfo.bind(this)
});
this._styleNode = null;
this._eventListeners = new EventListenerCollection();
@@ -165,6 +160,7 @@
}
async prepare() {
this._audioSystem.prepare();
this._updateMode();
this._setInteractive(true);
await this._displayGenerator.prepare();
@@ -1096,7 +1092,7 @@ class Display extends EventDispatcher {
try {
const {sources, textToSpeechVoice, customSourceUrl} = this._options.audio;
let index;
({audio, index} = await this._audioSystem.getDefinitionAudio(sources, expression, reading, {textToSpeechVoice, customSourceUrl}));
({audio, index} = await this._audioSystem.createDefinitionAudio(sources, expression, reading, {textToSpeechVoice, customSourceUrl}));
info = `From source ${1 + index}: ${sources[index]}`;
} catch (e) {
if (this._audioFallback === null) {
@@ -1419,4 +1415,8 @@ class Display extends EventDispatcher {
modeOptions
});
}
async _getAudioInfo(source, expression, reading, details) {
return await api.getDefinitionAudioInfo(source, expression, reading, details);
}
}