Fix display issues (#984)

* Remove unused function

* Update expressionMulti detection

* Simplify kanji early escape

* Simplify frequency/pitch data creation

* Update implementation of _buildTermMeta

* Update how pitch accents sources are collected

* Remove old _buildTermMeta
toasted-nutbread 2020-11-01 21:24:35 -05:00 committed by GitHub
parent 8ffae565c6
commit a8ff38eec3
3 changed files with 71 additions and 114 deletions

@@ -660,56 +660,68 @@ class Translator {
// Metadata building
async _buildTermMeta(definitions, enabledDictionaryMap) {
const terms = [];
for (const definition of definitions) {
switch (definition.type) {
case 'term':
case 'termGrouped':
terms.push(definition);
break;
case 'termMerged':
terms.push(...definition.expressions);
break;
const addMetadataTargetInfo = (targetMap1, target, parents) => {
let {expression, reading} = target;
if (!reading) { reading = expression; }
let targetMap2 = targetMap1.get(expression);
if (typeof targetMap2 === 'undefined') {
targetMap2 = new Map();
targetMap1.set(expression, targetMap2);
}
let targets = targetMap2.get(reading);
if (typeof targets === 'undefined') {
targets = new Set([target, ...parents]);
targetMap2.set(reading, targets);
} else {
targets.add(target);
for (const parent of parents) {
targets.add(parent);
}
}
};
const targetMap = new Map();
const definitionsQueue = definitions.map((definition) => ({definition, parents: []}));
while (definitionsQueue.length > 0) {
const {definition, parents} = definitionsQueue.shift();
const childDefinitions = definition.definitions;
if (Array.isArray(childDefinitions)) {
for (const definition2 of childDefinitions) {
definitionsQueue.push({definition: definition2, parents: [...parents, definition]});
}
} else {
addMetadataTargetInfo(targetMap, definition, parents);
}
for (const target of definition.expressions) {
addMetadataTargetInfo(targetMap, target, []);
}
}
const targetMapEntries = [...targetMap.entries()];
const uniqueExpressions = targetMapEntries.map(([expression]) => expression);
if (terms.length === 0) {
return;
}
// Create mapping of unique terms
const expressionsUnique = [];
const termsUnique = [];
const termsUniqueMap = new Map();
for (const term of terms) {
const {expression} = term;
let termList = termsUniqueMap.get(expression);
if (typeof termList === 'undefined') {
termList = [];
expressionsUnique.push(expression);
termsUnique.push(termList);
termsUniqueMap.set(expression, termList);
}
termList.push(term);
}
const metas = await this._database.findTermMetaBulk(expressionsUnique, enabledDictionaryMap);
const metas = await this._database.findTermMetaBulk(uniqueExpressions, enabledDictionaryMap);
for (const {expression, mode, data, dictionary, index} of metas) {
switch (mode) {
case 'freq':
for (const term of termsUnique[index]) {
const frequencyData = this._getFrequencyData(expression, data, dictionary, term);
if (frequencyData === null) { continue; }
term.frequencies.push(frequencyData);
}
break;
case 'pitch':
for (const term of termsUnique[index]) {
const pitchData = await this._getPitchData(expression, data, dictionary, term);
if (pitchData === null) { continue; }
term.pitches.push(pitchData);
}
break;
const targetMap2 = targetMapEntries[index][1];
for (const [reading, targets] of targetMap2) {
switch (mode) {
case 'freq':
{
const frequencyData = this._getFrequencyData(expression, reading, dictionary, data);
if (frequencyData === null) { continue; }
for (const {frequencies} of targets) { frequencies.push(frequencyData); }
}
break;
case 'pitch':
{
const pitchData = await this._getPitchData(expression, reading, dictionary, data);
if (pitchData === null) { continue; }
for (const {pitches} of targets) { pitches.push(pitchData); }
}
break;
}
}
}
}
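The rewritten _buildTermMeta groups every definition and expression into a two-level map keyed by expression and then by reading, so one findTermMetaBulk query per unique expression can fan its results out to every matching target. A minimal standalone sketch of that grouping follows; the addTarget helper and the sample terms are illustrative assumptions, not part of the commit:

    // Illustrative sketch of the grouping built by the new _buildTermMeta:
    // Map<expression, Map<reading, Set<target>>>. Sample terms are assumed.
    const targetMap = new Map();

    function addTarget(target) {
        const {expression} = target;
        const reading = target.reading || expression;
        let readingMap = targetMap.get(expression);
        if (typeof readingMap === 'undefined') {
            readingMap = new Map();
            targetMap.set(expression, readingMap);
        }
        let targets = readingMap.get(reading);
        if (typeof targets === 'undefined') {
            targets = new Set();
            readingMap.set(reading, targets);
        }
        targets.add(target);
    }

    addTarget({expression: '行く', reading: 'いく', frequencies: [], pitches: []});
    addTarget({expression: '行く', reading: 'ゆく', frequencies: [], pitches: []});

    // One database query per unique expression; each result is then pushed
    // onto every target that shares that expression and reading.
    console.log([...targetMap.keys()].length); // 1 unique expression
    console.log(targetMap.get('行く').size);    // 2 distinct readings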
@@ -796,22 +808,17 @@ class Translator {
return tagMetaList;
}
_getFrequencyData(expression, data, dictionary, term) {
_getFrequencyData(expression, reading, dictionary, data) {
let frequency = data;
if (data !== null && typeof data === 'object') {
const {frequency, reading} = data;
const termReading = term.reading || expression;
if (reading !== termReading) { return null; }
return {expression, frequency, dictionary};
if (data.reading !== reading) { return null; }
frequency = data.frequency;
}
return {expression, frequency: data, dictionary};
return {expression, reading, dictionary, frequency};
}
async _getPitchData(expression, data, dictionary, term) {
const reading = data.reading;
const termReading = term.reading || expression;
if (reading !== termReading) { return null; }
async _getPitchData(expression, reading, dictionary, data) {
if (data.reading !== reading) { return null; }
const pitches = [];
for (let {position, tags} of data.pitches) {
@@ -819,7 +826,7 @@ class Translator {
pitches.push({position, tags});
}
return {reading, pitches, dictionary};
return {expression, reading, dictionary, pitches};
}
// Simple helpers
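The updated helpers now receive the resolved reading as an argument and return self-describing objects. A hedged sketch of the expected shapes (the sample dictionary names and values are invented for illustration):

    // Illustrative shapes only; the sample values are invented.
    // _getFrequencyData(expression, reading, dictionary, data) is expected to return:
    const frequencyData = {
        expression: '読む',
        reading: 'よむ',
        dictionary: 'Example Frequency Dictionary',
        frequency: 1280
    };

    // _getPitchData(expression, reading, dictionary, data) is expected to return:
    const pitchData = {
        expression: '読む',
        reading: 'よむ',
        dictionary: 'Example Pitch Dictionary',
        pitches: [{position: 1, tags: []}]
    };

    console.log(frequencyData, pitchData);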

@@ -17,16 +17,13 @@
class DictionaryDataUtil {
static getPitchAccentInfos(definition) {
if (typeof definition.character === 'string') {
// Kanji
return [];
}
const {type} = definition;
if (type === 'kanji') { return []; }
const results = new Map();
const allExpressions = new Set();
const allReadings = new Set();
const expressions = definition.expressions;
const sources = Array.isArray(expressions) ? expressions : [definition];
const sources = [definition];
for (const {pitches: expressionPitches, expression} of sources) {
allExpressions.add(expression);
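Because each pitch entry produced by _getPitchData now carries its own expression, the definition itself can serve as the single pitch source here, and getPitchAccentInfos no longer needs to branch on definition.expressions. A small sketch of the assumed input shape (sample values invented, not taken from the commit):

    // Illustrative only: pitch data is assumed to sit directly on the definition.
    const definition = {
        type: 'term',
        expression: '読む',
        pitches: [
            {expression: '読む', reading: 'よむ', dictionary: 'Example Pitch Dictionary', pitches: [{position: 1, tags: []}]}
        ]
    };

    const sources = [definition];
    for (const {pitches: expressionPitches, expression} of sources) {
        for (const {reading, dictionary} of expressionPitches) {
            console.log(expression, reading, dictionary);
        }
    }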

@@ -45,12 +45,12 @@ class DisplayGenerator {
const debugInfoContainer = node.querySelector('.debug-info');
const bodyContainer = node.querySelector('.term-entry-body');
const {termTags, expressions, definitions} = details;
const {termTags, expressions, definitions, type} = details;
const pitches = DictionaryDataUtil.getPitchAccentInfos(details);
const pitchCount = pitches.reduce((i, v) => i + v.pitches.length, 0);
const expressionMulti = Array.isArray(expressions);
const expressionMulti = (type === 'termMerged' || type === 'termMergedByGlossary');
const definitionMulti = Array.isArray(definitions);
const expressionCount = expressionMulti ? expressions.length : 1;
const definitionCount = definitionMulti ? definitions.length : 1;
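The multi-expression layout is now derived from the definition type rather than from whether expressions happens to be an array. A minimal sketch mirroring the new check (the sample objects are assumptions for illustration):

    // Illustrative only; mirrors the expressionMulti check above.
    function isExpressionMulti({type}) {
        return type === 'termMerged' || type === 'termMergedByGlossary';
    }

    console.log(isExpressionMulti({type: 'termMerged'})); // true
    console.log(isExpressionMulti({type: 'term'}));       // false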
@@ -568,51 +568,4 @@ class DisplayGenerator {
container.appendChild(document.createTextNode(parts[i]));
}
}
_getPitchInfos(definition) {
const results = new Map();
const allExpressions = new Set();
const allReadings = new Set();
const expressions = definition.expressions;
const sources = Array.isArray(expressions) ? expressions : [definition];
for (const {pitches: expressionPitches, expression} of sources) {
allExpressions.add(expression);
for (const {reading, pitches, dictionary} of expressionPitches) {
allReadings.add(reading);
let dictionaryResults = results.get(dictionary);
if (typeof dictionaryResults === 'undefined') {
dictionaryResults = [];
results.set(dictionary, dictionaryResults);
}
for (const {position, tags} of pitches) {
let pitchInfo = this._findExistingPitchInfo(reading, position, tags, dictionaryResults);
if (pitchInfo === null) {
pitchInfo = {expressions: new Set(), reading, position, tags};
dictionaryResults.push(pitchInfo);
}
pitchInfo.expressions.add(expression);
}
}
}
for (const dictionaryResults of results.values()) {
for (const result of dictionaryResults) {
const exclusiveExpressions = [];
const exclusiveReadings = [];
const resultExpressions = result.expressions;
if (!areSetsEqual(resultExpressions, allExpressions)) {
exclusiveExpressions.push(...getSetIntersection(resultExpressions, allExpressions));
}
if (allReadings.size > 1) {
exclusiveReadings.push(result.reading);
}
result.exclusiveExpressions = exclusiveExpressions;
result.exclusiveReadings = exclusiveReadings;
}
}
return [...results.entries()];
}
}