Fix display issues (#984)
* Remove unused function
* Update expressionMulti detection
* Simplify kanji early escape
* Simplify frequency/pitch data creation
* Update implementation of _buildTermMeta
* Update how pitch accent sources are collected
* Remove old _buildTermMeta
parent 8ffae565c6
commit a8ff38eec3
@@ -660,56 +660,68 @@ class Translator {
     // Metadata building

     async _buildTermMeta(definitions, enabledDictionaryMap) {
-        const terms = [];
-        for (const definition of definitions) {
-            switch (definition.type) {
-                case 'term':
-                case 'termGrouped':
-                    terms.push(definition);
-                    break;
-                case 'termMerged':
-                    terms.push(...definition.expressions);
-                    break;
+        const addMetadataTargetInfo = (targetMap1, target, parents) => {
+            let {expression, reading} = target;
+            if (!reading) { reading = expression; }
+
+            let targetMap2 = targetMap1.get(expression);
+            if (typeof targetMap2 === 'undefined') {
+                targetMap2 = new Map();
+                targetMap1.set(expression, targetMap2);
+            }
+
+            let targets = targetMap2.get(reading);
+            if (typeof targets === 'undefined') {
+                targets = new Set([target, ...parents]);
+                targetMap2.set(reading, targets);
+            } else {
+                targets.add(target);
+                for (const parent of parents) {
+                    targets.add(parent);
+                }
+            }
+        };
+
+        const targetMap = new Map();
+        const definitionsQueue = definitions.map((definition) => ({definition, parents: []}));
+        while (definitionsQueue.length > 0) {
+            const {definition, parents} = definitionsQueue.shift();
+            const childDefinitions = definition.definitions;
+            if (Array.isArray(childDefinitions)) {
+                for (const definition2 of childDefinitions) {
+                    definitionsQueue.push({definition: definition2, parents: [...parents, definition]});
+                }
+            } else {
+                addMetadataTargetInfo(targetMap, definition, parents);
+            }
+
+            for (const target of definition.expressions) {
+                addMetadataTargetInfo(targetMap, target, []);
             }
         }

-        if (terms.length === 0) {
-            return;
-        }
-
-        // Create mapping of unique terms
-        const expressionsUnique = [];
-        const termsUnique = [];
-        const termsUniqueMap = new Map();
-        for (const term of terms) {
-            const {expression} = term;
-            let termList = termsUniqueMap.get(expression);
-            if (typeof termList === 'undefined') {
-                termList = [];
-                expressionsUnique.push(expression);
-                termsUnique.push(termList);
-                termsUniqueMap.set(expression, termList);
-            }
-            termList.push(term);
-        }
-
-        const metas = await this._database.findTermMetaBulk(expressionsUnique, enabledDictionaryMap);
+        const targetMapEntries = [...targetMap.entries()];
+        const uniqueExpressions = targetMapEntries.map(([expression]) => expression);
+        const metas = await this._database.findTermMetaBulk(uniqueExpressions, enabledDictionaryMap);
         for (const {expression, mode, data, dictionary, index} of metas) {
-            switch (mode) {
-                case 'freq':
-                    for (const term of termsUnique[index]) {
-                        const frequencyData = this._getFrequencyData(expression, data, dictionary, term);
-                        if (frequencyData === null) { continue; }
-                        term.frequencies.push(frequencyData);
-                    }
-                    break;
-                case 'pitch':
-                    for (const term of termsUnique[index]) {
-                        const pitchData = await this._getPitchData(expression, data, dictionary, term);
-                        if (pitchData === null) { continue; }
-                        term.pitches.push(pitchData);
-                    }
-                    break;
+            const targetMap2 = targetMapEntries[index][1];
+            for (const [reading, targets] of targetMap2) {
+                switch (mode) {
+                    case 'freq':
+                        {
+                            const frequencyData = this._getFrequencyData(expression, reading, dictionary, data);
+                            if (frequencyData === null) { continue; }
+                            for (const {frequencies} of targets) { frequencies.push(frequencyData); }
+                        }
+                        break;
+                    case 'pitch':
+                        {
+                            const pitchData = await this._getPitchData(expression, reading, dictionary, data);
+                            if (pitchData === null) { continue; }
+                            for (const {pitches} of targets) { pitches.push(pitchData); }
+                        }
+                        break;
+                }
             }
         }
     }
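For orientation, the new implementation groups every object that should receive metadata into a nested map keyed by expression, then by reading, with a Set of targets at each leaf; parent (grouped/merged) definitions ride along in the same Set as their expressions. A minimal standalone sketch of that grouping, where the helper name addTarget, the Japanese terms, and the object shapes are hypothetical illustrations rather than the extension's actual data:

// Sketch only: Map<expression, Map<reading, Set<target>>>, mirroring addMetadataTargetInfo above.
const targetMap = new Map();

function addTarget(target, parents = []) {
    const expression = target.expression;
    const reading = target.reading || expression;

    let readingMap = targetMap.get(expression);
    if (typeof readingMap === 'undefined') {
        readingMap = new Map();
        targetMap.set(expression, readingMap);
    }

    let targets = readingMap.get(reading);
    if (typeof targets === 'undefined') {
        targets = new Set([target, ...parents]);
        readingMap.set(reading, targets);
    } else {
        targets.add(target);
        for (const parent of parents) { targets.add(parent); }
    }
}

// Hypothetical targets: a merged parent definition and one of its expression entries.
const parentDefinition = {expression: '読む', reading: 'よむ', frequencies: [], pitches: []};
const expressionEntry = {expression: '読む', reading: 'よむ', frequencies: [], pitches: []};
addTarget(expressionEntry, [parentDefinition]);

// Both objects end up in the same Set, so one database result can update both at once.
console.log(targetMap.get('読む').get('よむ').size); // 2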
@@ -796,22 +808,17 @@ class Translator {
         return tagMetaList;
     }

-    _getFrequencyData(expression, data, dictionary, term) {
+    _getFrequencyData(expression, reading, dictionary, data) {
+        let frequency = data;
         if (data !== null && typeof data === 'object') {
-            const {frequency, reading} = data;
-
-            const termReading = term.reading || expression;
-            if (reading !== termReading) { return null; }
-
-            return {expression, frequency, dictionary};
+            if (data.reading !== reading) { return null; }
+            frequency = data.frequency;
         }
-        return {expression, frequency: data, dictionary};
+        return {expression, reading, dictionary, frequency};
     }

-    async _getPitchData(expression, data, dictionary, term) {
-        const reading = data.reading;
-        const termReading = term.reading || expression;
-        if (reading !== termReading) { return null; }
+    async _getPitchData(expression, reading, dictionary, data) {
+        if (data.reading !== reading) { return null; }

         const pitches = [];
         for (let {position, tags} of data.pitches) {
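The reworked helpers above now take the target reading as an argument and gate object-form metadata on it. A rough standalone restatement of the frequency case, assuming the two data shapes visible in the diff (a bare frequency value, or a {reading, frequency} object); the function name and example values are hypothetical:

// Sketch of the reading-gated frequency lookup (illustrative only, not the extension's code).
function getFrequencyData(expression, reading, dictionary, data) {
    let frequency = data;
    if (data !== null && typeof data === 'object') {
        // Object form only applies when the meta entry's reading matches the target reading.
        if (data.reading !== reading) { return null; }
        frequency = data.frequency;
    }
    return {expression, reading, dictionary, frequency};
}

// Hypothetical meta entries:
console.log(getFrequencyData('読む', 'よむ', 'example-dict', 42));
// -> {expression: '読む', reading: 'よむ', dictionary: 'example-dict', frequency: 42}
console.log(getFrequencyData('読む', 'よむ', 'example-dict', {reading: 'どく', frequency: 1}));
// -> null (reading mismatch)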
@@ -819,7 +826,7 @@ class Translator {
             pitches.push({position, tags});
         }

-        return {reading, pitches, dictionary};
+        return {expression, reading, dictionary, pitches};
     }

     // Simple helpers
@@ -17,16 +17,13 @@

 class DictionaryDataUtil {
     static getPitchAccentInfos(definition) {
-        if (typeof definition.character === 'string') {
-            // Kanji
-            return [];
-        }
+        const {type} = definition;
+        if (type === 'kanji') { return []; }

         const results = new Map();
         const allExpressions = new Set();
         const allReadings = new Set();
-        const expressions = definition.expressions;
-        const sources = Array.isArray(expressions) ? expressions : [definition];
+        const sources = [definition];

         for (const {pitches: expressionPitches, expression} of sources) {
             allExpressions.add(expression);
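Read together with the _buildTermMeta hunk above: pitch entries are now pushed onto the definition itself (parents are included in each target Set) and each entry carries its own expression and reading, which is presumably why the definition alone can serve as the pitch source here. A rough sketch of the kind of object this loop would consume; every field value is made up for illustration:

// Hypothetical definition shape after the new _buildTermMeta has run (not real dictionary data).
const definition = {
    type: 'term',
    expression: '読む',
    reading: 'よむ',
    pitches: [
        {expression: '読む', reading: 'よむ', dictionary: 'example-pitch-dict', pitches: [{position: 1, tags: []}]}
    ]
};

// sources = [definition], so the loop body sees definition.pitches directly.
for (const {pitches: expressionPitches, expression} of [definition]) {
    console.log(expression, expressionPitches.length); // '読む' 1
}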
@@ -45,12 +45,12 @@ class DisplayGenerator {
         const debugInfoContainer = node.querySelector('.debug-info');
         const bodyContainer = node.querySelector('.term-entry-body');

-        const {termTags, expressions, definitions} = details;
+        const {termTags, expressions, definitions, type} = details;

         const pitches = DictionaryDataUtil.getPitchAccentInfos(details);
         const pitchCount = pitches.reduce((i, v) => i + v.pitches.length, 0);

-        const expressionMulti = Array.isArray(expressions);
+        const expressionMulti = (type === 'termMerged' || type === 'termMergedByGlossary');
         const definitionMulti = Array.isArray(definitions);
         const expressionCount = expressionMulti ? expressions.length : 1;
         const definitionCount = definitionMulti ? definitions.length : 1;
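The multi-expression check is now driven by the definition type rather than by whether expressions happens to be an array. A quick illustration using the type values that appear elsewhere in these diffs; the helper name is hypothetical:

// Illustration of the new check (type strings taken from the diffs, helper is a sketch).
const isExpressionMulti = (type) => (type === 'termMerged' || type === 'termMergedByGlossary');

console.log(isExpressionMulti('term'));                 // false
console.log(isExpressionMulti('termGrouped'));          // false
console.log(isExpressionMulti('termMerged'));           // true
console.log(isExpressionMulti('termMergedByGlossary')); // true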
@@ -568,51 +568,4 @@ class DisplayGenerator {
             container.appendChild(document.createTextNode(parts[i]));
         }
     }
-
-    _getPitchInfos(definition) {
-        const results = new Map();
-
-        const allExpressions = new Set();
-        const allReadings = new Set();
-        const expressions = definition.expressions;
-        const sources = Array.isArray(expressions) ? expressions : [definition];
-        for (const {pitches: expressionPitches, expression} of sources) {
-            allExpressions.add(expression);
-            for (const {reading, pitches, dictionary} of expressionPitches) {
-                allReadings.add(reading);
-                let dictionaryResults = results.get(dictionary);
-                if (typeof dictionaryResults === 'undefined') {
-                    dictionaryResults = [];
-                    results.set(dictionary, dictionaryResults);
-                }
-
-                for (const {position, tags} of pitches) {
-                    let pitchInfo = this._findExistingPitchInfo(reading, position, tags, dictionaryResults);
-                    if (pitchInfo === null) {
-                        pitchInfo = {expressions: new Set(), reading, position, tags};
-                        dictionaryResults.push(pitchInfo);
-                    }
-                    pitchInfo.expressions.add(expression);
-                }
-            }
-        }
-
-        for (const dictionaryResults of results.values()) {
-            for (const result of dictionaryResults) {
-                const exclusiveExpressions = [];
-                const exclusiveReadings = [];
-                const resultExpressions = result.expressions;
-                if (!areSetsEqual(resultExpressions, allExpressions)) {
-                    exclusiveExpressions.push(...getSetIntersection(resultExpressions, allExpressions));
-                }
-                if (allReadings.size > 1) {
-                    exclusiveReadings.push(result.reading);
-                }
-                result.exclusiveExpressions = exclusiveExpressions;
-                result.exclusiveReadings = exclusiveReadings;
-            }
-        }
-
-        return [...results.entries()];
-    }
 }