Merge pull request #362 from toasted-nutbread/more-type-refactoring

More type refactoring
This commit is contained in:
toasted-nutbread 2020-02-22 14:50:21 -05:00 committed by GitHub
commit f8f03f3af0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 221 additions and 220 deletions

View File

@ -168,10 +168,8 @@ async function audioInject(definition, fields, sources, optionsContext) {
}
try {
let audioSourceDefinition = definition;
if (hasOwn(definition, 'expressions')) {
audioSourceDefinition = definition.expressions[0];
}
const expressions = definition.expressions;
const audioSourceDefinition = Array.isArray(expressions) ? expressions[0] : definition;
const {url} = await audioGetFromSources(audioSourceDefinition, sources, optionsContext, true);
if (url !== null) {

View File

@ -319,7 +319,8 @@ class Backend {
async _onApiTermsFind({text, details, optionsContext}) {
const options = await this.getOptions(optionsContext);
const [definitions, length] = await this.translator.findTerms(text, details, options);
const mode = options.general.resultOutputMode;
const [definitions, length] = await this.translator.findTerms(mode, text, details, options);
definitions.splice(options.general.maxResults);
return {length, definitions};
}
@ -329,9 +330,9 @@ class Backend {
const results = [];
while (text.length > 0) {
const term = [];
const [definitions, sourceLength] = await this.translator.findTermsInternal(
const [definitions, sourceLength] = await this.translator.findTerms(
'simple',
text.substring(0, options.scanning.length),
dictEnabledSet(options),
{},
options
);

View File

@ -149,15 +149,15 @@ class Database {
await Promise.all(promises);
}
async findTermsBulk(termList, titles, wildcard) {
async findTermsBulk(termList, dictionaries, wildcard) {
this._validate();
const promises = [];
const visited = {};
const visited = new Set();
const results = [];
const processRow = (row, index) => {
if (titles.includes(row.dictionary) && !hasOwn(visited, row.id)) {
visited[row.id] = true;
if (dictionaries.has(row.dictionary) && !visited.has(row.id)) {
visited.add(row.id);
results.push(Database._createTerm(row, index));
}
};
@ -184,13 +184,13 @@ class Database {
return results;
}
async findTermsExactBulk(termList, readingList, titles) {
async findTermsExactBulk(termList, readingList, dictionaries) {
this._validate();
const promises = [];
const results = [];
const processRow = (row, index) => {
if (row.reading === readingList[index] && titles.includes(row.dictionary)) {
if (row.reading === readingList[index] && dictionaries.has(row.dictionary)) {
results.push(Database._createTerm(row, index));
}
};
@ -234,16 +234,16 @@ class Database {
return results;
}
async findTermMetaBulk(termList, titles) {
return this._findGenericBulk('termMeta', 'expression', termList, titles, Database._createTermMeta);
async findTermMetaBulk(termList, dictionaries) {
return this._findGenericBulk('termMeta', 'expression', termList, dictionaries, Database._createTermMeta);
}
async findKanjiBulk(kanjiList, titles) {
return this._findGenericBulk('kanji', 'character', kanjiList, titles, Database._createKanji);
async findKanjiBulk(kanjiList, dictionaries) {
return this._findGenericBulk('kanji', 'character', kanjiList, dictionaries, Database._createKanji);
}
async findKanjiMetaBulk(kanjiList, titles) {
return this._findGenericBulk('kanjiMeta', 'character', kanjiList, titles, Database._createKanjiMeta);
async findKanjiMetaBulk(kanjiList, dictionaries) {
return this._findGenericBulk('kanjiMeta', 'character', kanjiList, dictionaries, Database._createKanjiMeta);
}
async findTagForTitle(name, title) {
@ -572,13 +572,13 @@ class Database {
return count > 0;
}
async _findGenericBulk(tableName, indexName, indexValueList, titles, createResult) {
async _findGenericBulk(tableName, indexName, indexValueList, dictionaries, createResult) {
this._validate();
const promises = [];
const results = [];
const processRow = (row, index) => {
if (titles.includes(row.dictionary)) {
if (dictionaries.has(row.dictionary)) {
results.push(createResult(row, index));
}
};

View File

@ -16,18 +16,21 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
/*global utilSetEqual, utilSetIntersection, apiTemplateRender*/
/*global apiTemplateRender*/
function dictEnabledSet(options) {
const dictionaries = {};
for (const title in options.dictionaries) {
const dictionary = options.dictionaries[title];
if (dictionary.enabled) {
dictionaries[title] = dictionary;
const enabledDictionaryMap = new Map();
const optionsDictionaries = options.dictionaries;
for (const title in optionsDictionaries) {
if (!hasOwn(optionsDictionaries, title)) { continue; }
const dictionary = optionsDictionaries[title];
if (!dictionary.enabled) { continue; }
enabledDictionaryMap.set(title, {
priority: dictionary.priority || 0,
allowSecondarySearches: !!dictionary.allowSecondarySearches
});
}
}
return dictionaries;
return enabledDictionaryMap;
}
function dictConfigured(options) {
@ -40,28 +43,15 @@ function dictConfigured(options) {
return false;
}
function dictRowsSort(rows, options) {
return rows.sort((ra, rb) => {
const pa = (options.dictionaries[ra.title] || {}).priority || 0;
const pb = (options.dictionaries[rb.title] || {}).priority || 0;
if (pa > pb) {
return -1;
} else if (pa < pb) {
return 1;
} else {
return 0;
}
});
}
function dictTermsSort(definitions, dictionaries=null) {
return definitions.sort((v1, v2) => {
let i;
if (dictionaries !== null) {
i = (
((dictionaries[v2.dictionary] || {}).priority || 0) -
((dictionaries[v1.dictionary] || {}).priority || 0)
);
const dictionaryInfo1 = dictionaries.get(v1.dictionary);
const dictionaryInfo2 = dictionaries.get(v2.dictionary);
const priority1 = typeof dictionaryInfo1 !== 'undefined' ? dictionaryInfo1.priority : 0;
const priority2 = typeof dictionaryInfo2 !== 'undefined' ? dictionaryInfo2.priority : 0;
i = priority2 - priority1;
if (i !== 0) { return i; }
}
@ -79,20 +69,16 @@ function dictTermsSort(definitions, dictionaries=null) {
}
function dictTermsUndupe(definitions) {
const definitionGroups = {};
const definitionGroups = new Map();
for (const definition of definitions) {
const definitionExisting = definitionGroups[definition.id];
if (!hasOwn(definitionGroups, definition.id) || definition.expression.length > definitionExisting.expression.length) {
definitionGroups[definition.id] = definition;
const id = definition.id;
const definitionExisting = definitionGroups.get(id);
if (typeof definitionExisting === 'undefined' || definition.expression.length > definitionExisting.expression.length) {
definitionGroups.set(id, definition);
}
}
const definitionsUnique = [];
for (const key in definitionGroups) {
definitionsUnique.push(definitionGroups[key]);
}
return definitionsUnique;
return [...definitionGroups.values()];
}
function dictTermsCompressTags(definitions) {
@ -123,35 +109,35 @@ function dictTermsCompressTags(definitions) {
}
function dictTermsGroup(definitions, dictionaries) {
const groups = {};
const groups = new Map();
for (const definition of definitions) {
const key = [definition.source, definition.expression];
key.push(...definition.reasons);
const key = [definition.source, definition.expression, ...definition.reasons];
if (definition.reading) {
key.push(definition.reading);
}
const keyString = key.toString();
if (hasOwn(groups, keyString)) {
groups[keyString].push(definition);
} else {
groups[keyString] = [definition];
let groupDefinitions = groups.get(keyString);
if (typeof groupDefinitions === 'undefined') {
groupDefinitions = [];
groups.set(keyString, groupDefinitions);
}
groupDefinitions.push(definition);
}
const results = [];
for (const key in groups) {
const groupDefs = groups[key];
const firstDef = groupDefs[0];
dictTermsSort(groupDefs, dictionaries);
for (const groupDefinitions of groups.values()) {
const firstDef = groupDefinitions[0];
dictTermsSort(groupDefinitions, dictionaries);
results.push({
definitions: groupDefs,
definitions: groupDefinitions,
expression: firstDef.expression,
reading: firstDef.reading,
furiganaSegments: firstDef.furiganaSegments,
reasons: firstDef.reasons,
termTags: firstDef.termTags,
score: groupDefs.reduce((p, v) => v.score > p ? v.score : p, Number.MIN_SAFE_INTEGER),
score: groupDefinitions.reduce((p, v) => v.score > p ? v.score : p, Number.MIN_SAFE_INTEGER),
source: firstDef.source
});
}
@ -159,6 +145,30 @@ function dictTermsGroup(definitions, dictionaries) {
return dictTermsSort(results);
}
/**
 * Determines whether two Set instances contain exactly the same values.
 * @param {Set} set1 - The first set.
 * @param {Set} set2 - The second set.
 * @returns {boolean} `true` when both sets have equal size and membership.
 */
function dictAreSetsEqual(set1, set2) {
    // Equal sizes plus full membership of one in the other implies equality.
    return set1.size === set2.size && [...set1].every((value) => set2.has(value));
}
/**
 * Computes the intersection of two sets as an array.
 * @param {Set} set1 - The set whose iteration order determines result order.
 * @param {Set} set2 - The set used for membership testing.
 * @returns {Array} The values present in both sets, in `set1` iteration order.
 */
function dictGetSetIntersection(set1, set2) {
    return [...set1].filter((value) => set2.has(value));
}
function dictTermsMergeBySequence(definitions, mainDictionary) {
const sequencedDefinitions = new Map();
const nonSequencedDefinitions = [];
@ -189,89 +199,103 @@ function dictTermsMergeBySequence(definitions, mainDictionary) {
return [sequencedDefinitions, nonSequencedDefinitions];
}
function dictTermsMergeByGloss(result, definitions, appendTo, mergedIndices) {
const definitionsByGloss = appendTo || {};
for (const [index, definition] of definitions.entries()) {
if (appendTo) {
let match = false;
for (const expression of result.expressions.keys()) {
if (definition.expression === expression) {
for (const reading of result.expressions.get(expression).keys()) {
if (definition.reading === reading) {
match = true;
break;
}
}
}
if (match) {
break;
}
}
function dictTermsMergeByGloss(result, definitions, appendTo=null, mergedIndices=null) {
const definitionsByGloss = appendTo !== null ? appendTo : new Map();
if (!match) {
continue;
} else if (mergedIndices) {
const resultExpressionsMap = result.expressions;
const resultExpressionSet = result.expression;
const resultReadingSet = result.reading;
const resultSource = result.source;
for (const [index, definition] of definitions.entries()) {
const {expression, reading} = definition;
if (mergedIndices !== null) {
const expressionMap = resultExpressionsMap.get(expression);
if (
typeof expressionMap !== 'undefined' &&
typeof expressionMap.get(reading) !== 'undefined'
) {
mergedIndices.add(index);
} else {
continue;
}
}
const gloss = JSON.stringify(definition.glossary.concat(definition.dictionary));
if (!definitionsByGloss[gloss]) {
definitionsByGloss[gloss] = {
let glossDefinition = definitionsByGloss.get(gloss);
if (typeof glossDefinition === 'undefined') {
glossDefinition = {
expression: new Set(),
reading: new Set(),
definitionTags: [],
glossary: definition.glossary,
source: result.source,
source: resultSource,
reasons: [],
score: definition.score,
id: definition.id,
dictionary: definition.dictionary
};
definitionsByGloss.set(gloss, glossDefinition);
}
definitionsByGloss[gloss].expression.add(definition.expression);
definitionsByGloss[gloss].reading.add(definition.reading);
glossDefinition.expression.add(expression);
glossDefinition.reading.add(reading);
result.expression.add(definition.expression);
result.reading.add(definition.reading);
resultExpressionSet.add(expression);
resultReadingSet.add(reading);
for (const tag of definition.definitionTags) {
if (!definitionsByGloss[gloss].definitionTags.find((existingTag) => existingTag.name === tag.name)) {
definitionsByGloss[gloss].definitionTags.push(tag);
if (!glossDefinition.definitionTags.find((existingTag) => existingTag.name === tag.name)) {
glossDefinition.definitionTags.push(tag);
}
}
if (!appendTo) {
// result->expressions[ Expression1[ Reading1[ Tag1, Tag2 ] ], Expression2, ... ]
if (!result.expressions.has(definition.expression)) {
result.expressions.set(definition.expression, new Map());
if (appendTo === null) {
/*
Data layout:
resultExpressionsMap = new Map([
[expression, new Map([
[reading, new Map([
[tagName, tagInfo],
...
])],
...
])],
...
]);
*/
let readingMap = resultExpressionsMap.get(expression);
if (typeof readingMap === 'undefined') {
readingMap = new Map();
resultExpressionsMap.set(expression, readingMap);
}
if (!result.expressions.get(definition.expression).has(definition.reading)) {
result.expressions.get(definition.expression).set(definition.reading, []);
let termTagsMap = readingMap.get(reading);
if (typeof termTagsMap === 'undefined') {
termTagsMap = new Map();
readingMap.set(reading, termTagsMap);
}
for (const tag of definition.termTags) {
if (!result.expressions.get(definition.expression).get(definition.reading).find((existingTag) => existingTag.name === tag.name)) {
result.expressions.get(definition.expression).get(definition.reading).push(tag);
if (!termTagsMap.has(tag.name)) {
termTagsMap.set(tag.name, tag);
}
}
}
}
for (const gloss in definitionsByGloss) {
const definition = definitionsByGloss[gloss];
definition.only = [];
if (!utilSetEqual(definition.expression, result.expression)) {
for (const expression of utilSetIntersection(definition.expression, result.expression)) {
definition.only.push(expression);
}
}
if (!utilSetEqual(definition.reading, result.reading)) {
for (const reading of utilSetIntersection(definition.reading, result.reading)) {
definition.only.push(reading);
for (const definition of definitionsByGloss.values()) {
const only = [];
const expressionSet = definition.expression;
const readingSet = definition.reading;
if (!dictAreSetsEqual(expressionSet, resultExpressionSet)) {
only.push(...dictGetSetIntersection(expressionSet, resultExpressionSet));
}
if (!dictAreSetsEqual(readingSet, resultReadingSet)) {
only.push(...dictGetSetIntersection(readingSet, resultReadingSet));
}
definition.only = only;
}
return definitionsByGloss;

View File

@ -491,15 +491,18 @@ function dictionaryErrorsShow(errors) {
dialog.textContent = '';
if (errors !== null && errors.length > 0) {
const uniqueErrors = {};
const uniqueErrors = new Map();
for (let e of errors) {
console.error(e);
e = dictionaryErrorToString(e);
uniqueErrors[e] = hasOwn(uniqueErrors, e) ? uniqueErrors[e] + 1 : 1;
let count = uniqueErrors.get(e);
if (typeof count === 'undefined') {
count = 0;
}
uniqueErrors.set(e, count + 1);
}
for (const e in uniqueErrors) {
const count = uniqueErrors[e];
for (const [e, count] of uniqueErrors.entries()) {
const div = document.createElement('p');
if (count > 1) {
div.textContent = `${e} `;

View File

@ -70,8 +70,8 @@ class Translator {
return {sequencedDefinitions, defaultDefinitions};
}
async getMergedSecondarySearchResults(text, expressionsMap, secondarySearchTitles) {
if (secondarySearchTitles.length === 0) {
async getMergedSecondarySearchResults(text, expressionsMap, secondarySearchDictionaries) {
if (secondarySearchDictionaries.size === 0) {
return [];
}
@ -85,7 +85,7 @@ class Translator {
}
}
const definitions = await this.database.findTermsExactBulk(expressionList, readingList, secondarySearchTitles);
const definitions = await this.database.findTermsExactBulk(expressionList, readingList, secondarySearchDictionaries);
for (const definition of definitions) {
const definitionTags = await this.expandTags(definition.definitionTags, definition.dictionary);
definitionTags.push(dictTagBuildSource(definition.dictionary));
@ -101,7 +101,7 @@ class Translator {
return definitions;
}
async getMergedDefinition(text, dictionaries, sequencedDefinition, defaultDefinitions, secondarySearchTitles, mergedByTermIndices) {
async getMergedDefinition(text, dictionaries, sequencedDefinition, defaultDefinitions, secondarySearchDictionaries, mergedByTermIndices) {
const result = sequencedDefinition.definitions;
const rawDefinitionsBySequence = sequencedDefinition.rawDefinitions;
@ -114,12 +114,11 @@ class Translator {
}
const definitionsByGloss = dictTermsMergeByGloss(result, rawDefinitionsBySequence);
const secondarySearchResults = await this.getMergedSecondarySearchResults(text, result.expressions, secondarySearchTitles);
const secondarySearchResults = await this.getMergedSecondarySearchResults(text, result.expressions, secondarySearchDictionaries);
dictTermsMergeByGloss(result, defaultDefinitions.concat(secondarySearchResults), definitionsByGloss, mergedByTermIndices);
for (const gloss in definitionsByGloss) {
const definition = definitionsByGloss[gloss];
for (const definition of definitionsByGloss.values()) {
dictTagsSort(definition.definitionTags);
result.definitions.push(definition);
}
@ -128,7 +127,8 @@ class Translator {
const expressions = [];
for (const [expression, readingMap] of result.expressions.entries()) {
for (const [reading, termTags] of readingMap.entries()) {
for (const [reading, termTagsMap] of readingMap.entries()) {
const termTags = [...termTagsMap.values()];
const score = termTags.map((tag) => tag.score).reduce((p, v) => p + v, 0);
expressions.push(Translator.createExpression(expression, reading, dictTagsSort(termTags), Translator.scoreToTermFrequency(score)));
}
@ -141,14 +141,16 @@ class Translator {
return result;
}
async findTerms(text, details, options) {
switch (options.general.resultOutputMode) {
async findTerms(mode, text, details, options) {
switch (mode) {
case 'group':
return await this.findTermsGrouped(text, details, options);
case 'merge':
return await this.findTermsMerged(text, details, options);
case 'split':
return await this.findTermsSplit(text, details, options);
case 'simple':
return await this.findTermsSimple(text, details, options);
default:
return [[], 0];
}
@ -156,11 +158,10 @@ class Translator {
async findTermsGrouped(text, details, options) {
const dictionaries = dictEnabledSet(options);
const titles = Object.keys(dictionaries);
const [definitions, length] = await this.findTermsInternal(text, dictionaries, details, options);
const definitionsGrouped = dictTermsGroup(definitions, dictionaries);
await this.buildTermMeta(definitionsGrouped, titles);
await this.buildTermMeta(definitionsGrouped, dictionaries);
if (options.general.compactTags) {
for (const definition of definitionsGrouped) {
@ -173,8 +174,12 @@ class Translator {
async findTermsMerged(text, details, options) {
const dictionaries = dictEnabledSet(options);
const secondarySearchTitles = Object.keys(options.dictionaries).filter((dict) => options.dictionaries[dict].allowSecondarySearches);
const titles = Object.keys(dictionaries);
const secondarySearchDictionaries = new Map();
for (const [title, dictionary] of dictionaries.entries()) {
if (!dictionary.allowSecondarySearches) { continue; }
secondarySearchDictionaries.set(title, dictionary);
}
const [definitions, length] = await this.findTermsInternal(text, dictionaries, details, options);
const {sequencedDefinitions, defaultDefinitions} = await this.getSequencedDefinitions(definitions, options.general.mainDictionary);
const definitionsMerged = [];
@ -186,7 +191,7 @@ class Translator {
dictionaries,
sequencedDefinition,
defaultDefinitions,
secondarySearchTitles,
secondarySearchDictionaries,
mergedByTermIndices
);
definitionsMerged.push(result);
@ -198,7 +203,7 @@ class Translator {
definitionsMerged.push(groupedDefinition);
}
await this.buildTermMeta(definitionsMerged, titles);
await this.buildTermMeta(definitionsMerged, dictionaries);
if (options.general.compactTags) {
for (const definition of definitionsMerged) {
@ -211,25 +216,28 @@ class Translator {
async findTermsSplit(text, details, options) {
const dictionaries = dictEnabledSet(options);
const titles = Object.keys(dictionaries);
const [definitions, length] = await this.findTermsInternal(text, dictionaries, details, options);
await this.buildTermMeta(definitions, titles);
await this.buildTermMeta(definitions, dictionaries);
return [definitions, length];
}
// Finds term definitions for `text` in 'simple' mode: no grouping, merging,
// or term-meta post-processing — just the raw findTermsInternal result.
// Builds the enabled-dictionary map from `options` via dictEnabledSet and
// returns whatever findTermsInternal produces (a [definitions, length] pair,
// based on the sibling findTerms* methods — confirm against findTermsInternal).
async findTermsSimple(text, details, options) {
const dictionaries = dictEnabledSet(options);
return await this.findTermsInternal(text, dictionaries, details, options);
}
async findTermsInternal(text, dictionaries, details, options) {
text = Translator.getSearchableText(text, options);
if (text.length === 0) {
return [[], 0];
}
const titles = Object.keys(dictionaries);
const deinflections = (
details.wildcard ?
await this.findTermWildcard(text, titles, details.wildcard) :
await this.findTermDeinflections(text, titles, options)
await this.findTermWildcard(text, dictionaries, details.wildcard) :
await this.findTermDeinflections(text, dictionaries, options)
);
let definitions = [];
@ -271,8 +279,8 @@ class Translator {
return [definitions, length];
}
async findTermWildcard(text, titles, wildcard) {
const definitions = await this.database.findTermsBulk([text], titles, wildcard);
async findTermWildcard(text, dictionaries, wildcard) {
const definitions = await this.database.findTermsBulk([text], dictionaries, wildcard);
if (definitions.length === 0) {
return [];
}
@ -287,7 +295,7 @@ class Translator {
}];
}
async findTermDeinflections(text, titles, options) {
async findTermDeinflections(text, dictionaries, options) {
const deinflections = this.getAllDeinflections(text, options);
if (deinflections.length === 0) {
@ -309,7 +317,7 @@ class Translator {
deinflectionArray.push(deinflection);
}
const definitions = await this.database.findTermsBulk(uniqueDeinflectionTerms, titles, null);
const definitions = await this.database.findTermsBulk(uniqueDeinflectionTerms, dictionaries, null);
for (const definition of definitions) {
const definitionRules = Deinflector.rulesToRuleFlags(definition.rules);
@ -399,17 +407,12 @@ class Translator {
async findKanji(text, options) {
const dictionaries = dictEnabledSet(options);
const titles = Object.keys(dictionaries);
const kanjiUnique = {};
const kanjiList = [];
const kanjiUnique = new Set();
for (const c of text) {
if (!hasOwn(kanjiUnique, c)) {
kanjiList.push(c);
kanjiUnique[c] = true;
}
kanjiUnique.add(c);
}
const definitions = await this.database.findKanjiBulk(kanjiList, titles);
const definitions = await this.database.findKanjiBulk([...kanjiUnique], dictionaries);
if (definitions.length === 0) {
return definitions;
}
@ -429,12 +432,12 @@ class Translator {
definition.stats = stats;
}
await this.buildKanjiMeta(definitions, titles);
await this.buildKanjiMeta(definitions, dictionaries);
return definitions;
}
async buildTermMeta(definitions, titles) {
async buildTermMeta(definitions, dictionaries) {
const terms = [];
for (const definition of definitions) {
if (definition.expressions) {
@ -468,7 +471,7 @@ class Translator {
term.frequencies = [];
}
const metas = await this.database.findTermMetaBulk(expressionsUnique, titles);
const metas = await this.database.findTermMetaBulk(expressionsUnique, dictionaries);
for (const {expression, mode, data, dictionary, index} of metas) {
switch (mode) {
case 'freq':
@ -480,14 +483,14 @@ class Translator {
}
}
async buildKanjiMeta(definitions, titles) {
async buildKanjiMeta(definitions, dictionaries) {
const kanjiList = [];
for (const definition of definitions) {
kanjiList.push(definition.character);
definition.frequencies = [];
}
const metas = await this.database.findKanjiMetaBulk(kanjiList, titles);
const metas = await this.database.findKanjiMetaBulk(kanjiList, dictionaries);
for (const {character, mode, data, dictionary, index} of metas) {
switch (mode) {
case 'freq':
@ -510,28 +513,29 @@ class Translator {
const names = Object.keys(items);
const tagMetaList = await this.getTagMetaList(names, title);
const stats = {};
const statsGroups = new Map();
for (let i = 0; i < names.length; ++i) {
const name = names[i];
const meta = tagMetaList[i];
if (meta === null) { continue; }
const category = meta.category;
const group = (
hasOwn(stats, category) ?
stats[category] :
(stats[category] = [])
);
let group = statsGroups.get(category);
if (typeof group === 'undefined') {
group = [];
statsGroups.set(category, group);
}
const stat = Object.assign({}, meta, {name, value: items[name]});
group.push(dictTagSanitize(stat));
}
const stats = {};
const sortCompare = (a, b) => a.notes - b.notes;
for (const category in stats) {
stats[category].sort(sortCompare);
for (const [category, group] of statsGroups.entries()) {
group.sort(sortCompare);
stats[category] = group;
}
return stats;
}

View File

@ -59,32 +59,6 @@ function utilBackgroundFunctionIsolate(func) {
return backgroundPage.utilFunctionIsolate(func);
}
/**
 * Tests whether two sets are equal (same size and same members).
 * @param {Set} setA - The first set.
 * @param {Set} setB - The second set.
 * @returns {boolean} `true` if the sets contain exactly the same values.
 */
function utilSetEqual(setA, setB) {
    if (setA.size !== setB.size) { return false; }
    // Sizes match, so it suffices that every member of setA is in setB.
    for (const item of setA) {
        if (!setB.has(item)) { return false; }
    }
    return true;
}
/**
 * Computes the intersection of two sets.
 * @param {Set} setA - The first set.
 * @param {Set} setB - The second set.
 * @returns {Set} A new Set of the values present in both inputs.
 */
function utilSetIntersection(setA, setB) {
    const intersection = new Set();
    for (const value of setA) {
        if (setB.has(value)) { intersection.add(value); }
    }
    return intersection;
}
/**
 * Computes the set difference (setA minus setB).
 * @param {Set} setA - The set to subtract from.
 * @param {Set} setB - The set of values to remove.
 * @returns {Set} A new Set of the values in `setA` that are not in `setB`.
 */
function utilSetDifference(setA, setB) {
    const difference = new Set();
    for (const value of setA) {
        if (!setB.has(value)) { difference.add(value); }
    }
    return difference;
}
function utilStringHashCode(string) {
let hashCode = 0;

View File

@ -72,19 +72,15 @@ class TextToSpeechAudio {
const m = /^tts:[^#?]*\?([^#]*)/.exec(ttsUri);
if (m === null) { return null; }
const searchParameters = {};
for (const group of m[1].split('&')) {
const sep = group.indexOf('=');
if (sep < 0) { continue; }
searchParameters[decodeURIComponent(group.substring(0, sep))] = decodeURIComponent(group.substring(sep + 1));
}
const searchParameters = new URLSearchParams(m[1]);
const text = searchParameters.get('text');
let voice = searchParameters.get('voice');
if (text === null || voice === null) { return null; }
if (!searchParameters.text) { return null; }
const voice = audioGetTextToSpeechVoice(searchParameters.voice);
voice = audioGetTextToSpeechVoice(voice);
if (voice === null) { return null; }
return new TextToSpeechAudio(searchParameters.text, voice);
return new TextToSpeechAudio(text, voice);
}
}

View File

@ -158,7 +158,7 @@ class TextScanner {
onTouchEnd(e) {
if (
this.primaryTouchIdentifier === null ||
TextScanner.getIndexOfTouch(e.changedTouches, this.primaryTouchIdentifier) < 0
TextScanner.getTouch(e.changedTouches, this.primaryTouchIdentifier) === null
) {
return;
}
@ -181,13 +181,11 @@ class TextScanner {
return;
}
const touches = e.changedTouches;
const index = TextScanner.getIndexOfTouch(touches, this.primaryTouchIdentifier);
if (index < 0) {
const primaryTouch = TextScanner.getTouch(e.changedTouches, this.primaryTouchIdentifier);
if (primaryTouch === null) {
return;
}
const primaryTouch = touches[index];
this.searchAt(primaryTouch.clientX, primaryTouch.clientY, 'touchMove');
e.preventDefault(); // Disable scroll
@ -356,13 +354,12 @@ class TextScanner {
}
}
static getIndexOfTouch(touchList, identifier) {
for (const i in touchList) {
const t = touchList[i];
if (t.identifier === identifier) {
return i;
static getTouch(touchList, identifier) {
for (const touch of touchList) {
if (touch.identifier === identifier) {
return touch;
}
}
return -1;
return null;
}
}

View File

@ -136,7 +136,9 @@ async function testDatabase1() {
const testDictionaryIndex = JSON.parse(await testDictionary.files['index.json'].async('string'));
const title = testDictionaryIndex.title;
const titles = [title];
const titles = new Map([
[title, {priority: 0, allowSecondarySearches: false}]
]);
// Setup iteration data
const iterations = [
@ -815,7 +817,9 @@ async function testDatabase2() {
const testDictionaryIndex = JSON.parse(await testDictionary.files['index.json'].async('string'));
const title = testDictionaryIndex.title;
const titles = [title];
const titles = new Map([
[title, {priority: 0, allowSecondarySearches: false}]
]);
// Setup database
const database = new Database();