Rename pitchAccent in source code (#1852)

* Rename pitchAccentPosition to pitchAccentDownstepPosition
* Rename function
* Rename
* Rename
* pitches => pronunciations
Parent: a9710b8acf
Commit: b0596c8a3c
@@ -157,10 +157,10 @@ class AnkiNoteDataCreator {
     _getPitches(dictionaryEntry) {
         const results = [];
         if (dictionaryEntry.type === 'term') {
-            for (const {dictionary, pitches} of DictionaryDataUtil.getPitchAccentInfos(dictionaryEntry)) {
-                const pitches2 = [];
-                for (const {terms, reading, position, nasalPositions, devoicePositions, tags, exclusiveTerms, exclusiveReadings} of pitches) {
-                    pitches2.push({
+            for (const {dictionary, pronunciations} of DictionaryDataUtil.getGroupedPronunciations(dictionaryEntry)) {
+                const pitches = [];
+                for (const {terms, reading, position, nasalPositions, devoicePositions, tags, exclusiveTerms, exclusiveReadings} of pronunciations) {
+                    pitches.push({
                         expressions: terms,
                         reading,
                         position,
@@ -171,7 +171,7 @@ class AnkiNoteDataCreator {
                         exclusiveReadings
                     });
                 }
-                results.push({dictionary, pitches: pitches2});
+                results.push({dictionary, pitches});
             }
         }
         return results;
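For orientation, here is a rough sketch (not taken from the diff itself) of the value `_getPitches` builds after this change: the data now comes from the renamed `DictionaryDataUtil.getGroupedPronunciations`, while the outer `pitches` and `expressions` keys are kept so existing Anki templates keep working. All concrete values below are hypothetical.

```js
// Sketch of one entry pushed onto `results` (hypothetical values).
// The `pitches` key is intentionally preserved for Anki template compatibility.
const exampleResult = {
    dictionary: 'Example Pitch Dictionary',
    pitches: [
        {
            expressions: ['日本語'],  // grouped terms, exposed as `expressions` for templates
            reading: 'にほんご',
            position: 0,              // pitch accent downstep position
            exclusiveReadings: []     // other fields shown in the diff are omitted here
        }
    ]
};
```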
@@ -51,14 +51,14 @@ class DisplayGenerator {
 
         const headwordsContainer = node.querySelector('.headword-list');
         const inflectionsContainer = node.querySelector('.inflection-list');
-        const pitchesContainer = node.querySelector('.pronunciation-group-list');
+        const groupedPronunciationsContainer = node.querySelector('.pronunciation-group-list');
         const frequencyGroupListContainer = node.querySelector('.frequency-group-list');
         const definitionsContainer = node.querySelector('.definition-list');
         const headwordTagsContainer = node.querySelector('.headword-list-tag-list');
 
         const {headwords, type, inflections, definitions, frequencies, pronunciations} = dictionaryEntry;
-        const pitches = DictionaryDataUtil.getPitchAccentInfos(dictionaryEntry);
-        const pitchCount = pitches.reduce((i, v) => i + v.pitches.length, 0);
+        const groupedPronunciations = DictionaryDataUtil.getGroupedPronunciations(dictionaryEntry);
+        const pronunciationCount = groupedPronunciations.reduce((i, v) => i + v.pronunciations.length, 0);
         const groupedFrequencies = DictionaryDataUtil.groupTermFrequencies(dictionaryEntry);
         const termTags = DictionaryDataUtil.groupTermTags(dictionaryEntry);
 
@@ -72,8 +72,8 @@ class DisplayGenerator {
         node.dataset.format = type;
         node.dataset.headwordCount = `${headwords.length}`;
         node.dataset.definitionCount = `${definitions.length}`;
-        node.dataset.pronunciationDictionaryCount = `${pitches.length}`;
-        node.dataset.pronunciationCount = `${pitchCount}`;
+        node.dataset.pronunciationDictionaryCount = `${groupedPronunciations.length}`;
+        node.dataset.pronunciationCount = `${pronunciationCount}`;
         node.dataset.uniqueTermCount = `${uniqueTerms.size}`;
         node.dataset.uniqueReadingCount = `${uniqueReadings.size}`;
         node.dataset.frequencyCount = `${frequencies.length}`;
@@ -88,7 +88,7 @@ class DisplayGenerator {
 
         this._appendMultiple(inflectionsContainer, this._createTermInflection.bind(this), inflections);
         this._appendMultiple(frequencyGroupListContainer, this._createFrequencyGroup.bind(this), groupedFrequencies, false);
-        this._appendMultiple(pitchesContainer, this._createPitches.bind(this), pitches);
+        this._appendMultiple(groupedPronunciationsContainer, this._createGroupedPronunciation.bind(this), groupedPronunciations);
         this._appendMultiple(headwordTagsContainer, this._createTermTag.bind(this), termTags, headwords.length);
 
         for (const term of uniqueTerms) {
@@ -432,19 +432,19 @@ class DisplayGenerator {
         return this._createTag(this._createTagData(text, 'search'));
     }
 
-    _createPitches(details) {
-        const {dictionary, pitches} = details;
+    _createGroupedPronunciation(details) {
+        const {dictionary, pronunciations} = details;
 
         const node = this._templates.instantiate('pronunciation-group');
         node.dataset.dictionary = dictionary;
-        node.dataset.pitchesMulti = 'true';
-        node.dataset.pitchesCount = `${pitches.length}`;
+        node.dataset.pronunciationsMulti = 'true';
+        node.dataset.pronunciationsCount = `${pronunciations.length}`;
 
         const tag = this._createTag(this._createTagData(dictionary, 'pronunciation-dictionary'));
         node.querySelector('.pronunciation-group-tag-list').appendChild(tag);
 
         let hasTags = false;
-        for (const {tags} of pitches) {
+        for (const {tags} of pronunciations) {
             if (tags.length > 0) {
                 hasTags = true;
                 break;
@@ -453,12 +453,12 @@ class DisplayGenerator {
 
         const n = node.querySelector('.pronunciation-list');
         n.dataset.hasTags = `${hasTags}`;
-        this._appendMultiple(n, this._createPitch.bind(this), pitches);
+        this._appendMultiple(n, this._createPronunciation.bind(this), pronunciations);
 
         return node;
     }
 
-    _createPitch(details) {
+    _createPronunciation(details) {
         const jp = this._japaneseUtil;
         const {reading, position, nasalPositions, devoicePositions, tags, exclusiveTerms, exclusiveReadings} = details;
         const morae = jp.getKanaMorae(reading);
@@ -474,7 +474,7 @@ class DisplayGenerator {
         this._appendMultiple(n, this._createTag.bind(this), tags);
 
         n = node.querySelector('.pronunciation-disambiguation-list');
-        this._createPitchAccentDisambiguations(n, exclusiveTerms, exclusiveReadings);
+        this._createPronunciationDisambiguations(n, exclusiveTerms, exclusiveReadings);
 
         n = node.querySelector('.pronunciation-downstep-notation-container');
         n.appendChild(this._pronunciationGenerator.createPronunciationDownstepPosition(position));
@@ -488,7 +488,7 @@ class DisplayGenerator {
         return node;
     }
 
-    _createPitchAccentDisambiguations(container, exclusiveTerms, exclusiveReadings) {
+    _createPronunciationDisambiguations(container, exclusiveTerms, exclusiveReadings) {
         const templateName = 'pronunciation-disambiguation';
         for (const term of exclusiveTerms) {
            const node = this._templates.instantiate(templateName);
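As a rough sketch of how the renamed DisplayGenerator pieces fit together (illustrative only, not part of the diff): grouped pronunciations are fetched once per dictionary entry, and each `{dictionary, pronunciations}` group becomes one `pronunciation-group` node with one child per pronunciation. The `console.log` stand-in below is purely for illustration.

```js
// Mirrors the renamed call chain from the diff; logging replaces DOM construction.
const groupedPronunciations = DictionaryDataUtil.getGroupedPronunciations(dictionaryEntry);
const pronunciationCount = groupedPronunciations.reduce((i, v) => i + v.pronunciations.length, 0);

for (const {dictionary, pronunciations} of groupedPronunciations) {
    // one 'pronunciation-group' template instance per dictionary,
    // one pronunciation entry rendered per grouped pronunciation within it
    console.log(`${dictionary}: ${pronunciations.length} of ${pronunciationCount} pronunciations`);
}
```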
@@ -90,7 +90,7 @@ class DictionaryDataUtil {
         return this._createFrequencyGroupsFromMap(map1);
     }
 
-    static getPitchAccentInfos(dictionaryEntry) {
+    static getGroupedPronunciations(dictionaryEntry) {
         const {headwords, pronunciations} = dictionaryEntry;
 
         const allTerms = new Set();
@@ -100,18 +100,18 @@ class DictionaryDataUtil {
             allReadings.add(reading);
         }
 
-        const pitchAccentInfoMap = new Map();
+        const groupedPronunciationsMap = new Map();
         for (const {headwordIndex, dictionary, pitches} of pronunciations) {
             const {term, reading} = headwords[headwordIndex];
-            let dictionaryPitchAccentInfoList = pitchAccentInfoMap.get(dictionary);
-            if (typeof dictionaryPitchAccentInfoList === 'undefined') {
-                dictionaryPitchAccentInfoList = [];
-                pitchAccentInfoMap.set(dictionary, dictionaryPitchAccentInfoList);
+            let dictionaryGroupedPronunciationList = groupedPronunciationsMap.get(dictionary);
+            if (typeof dictionaryGroupedPronunciationList === 'undefined') {
+                dictionaryGroupedPronunciationList = [];
+                groupedPronunciationsMap.set(dictionary, dictionaryGroupedPronunciationList);
             }
             for (const {position, nasalPositions, devoicePositions, tags} of pitches) {
-                let pitchAccentInfo = this._findExistingPitchAccentInfo(reading, position, nasalPositions, devoicePositions, tags, dictionaryPitchAccentInfoList);
-                if (pitchAccentInfo === null) {
-                    pitchAccentInfo = {
+                let groupedPronunciation = this._findExistingGroupedPronunciation(reading, position, nasalPositions, devoicePositions, tags, dictionaryGroupedPronunciationList);
+                if (groupedPronunciation === null) {
+                    groupedPronunciation = {
                         terms: new Set(),
                         reading,
                         position,
@@ -121,29 +121,29 @@ class DictionaryDataUtil {
                         exclusiveTerms: [],
                         exclusiveReadings: []
                     };
-                    dictionaryPitchAccentInfoList.push(pitchAccentInfo);
+                    dictionaryGroupedPronunciationList.push(groupedPronunciation);
                 }
-                pitchAccentInfo.terms.add(term);
+                groupedPronunciation.terms.add(term);
             }
         }
 
         const multipleReadings = (allReadings.size > 1);
-        for (const dictionaryPitchAccentInfoList of pitchAccentInfoMap.values()) {
-            for (const pitchAccentInfo of dictionaryPitchAccentInfoList) {
-                const {terms, reading, exclusiveTerms, exclusiveReadings} = pitchAccentInfo;
+        for (const dictionaryGroupedPronunciationList of groupedPronunciationsMap.values()) {
+            for (const groupedPronunciation of dictionaryGroupedPronunciationList) {
+                const {terms, reading, exclusiveTerms, exclusiveReadings} = groupedPronunciation;
                 if (!this._areSetsEqual(terms, allTerms)) {
                     exclusiveTerms.push(...this._getSetIntersection(terms, allTerms));
                 }
                 if (multipleReadings) {
                     exclusiveReadings.push(reading);
                 }
-                pitchAccentInfo.terms = [...terms];
+                groupedPronunciation.terms = [...terms];
             }
         }
 
         const results2 = [];
-        for (const [dictionary, pitches] of pitchAccentInfoMap.entries()) {
-            results2.push({dictionary, pitches});
+        for (const [dictionary, pronunciations2] of groupedPronunciationsMap.entries()) {
+            results2.push({dictionary, pronunciations: pronunciations2});
         }
         return results2;
     }
@@ -230,8 +230,8 @@ class DictionaryDataUtil {
         return results;
     }
 
-    static _findExistingPitchAccentInfo(reading, position, nasalPositions, devoicePositions, tags, pitchAccentInfoList) {
-        for (const pitchInfo of pitchAccentInfoList) {
+    static _findExistingGroupedPronunciation(reading, position, nasalPositions, devoicePositions, tags, groupedPronunciationList) {
+        for (const pitchInfo of groupedPronunciationList) {
             if (
                 pitchInfo.reading === reading &&
                 pitchInfo.position === position &&
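The renamed `getGroupedPronunciations` keeps the same behavior: identical pitch data is grouped per dictionary, and each group records which terms and readings it is exclusive to when an entry has several headwords. Below is a sketch of the assumed return shape, inferred from the diff, with hypothetical values.

```js
// Assumed shape of DictionaryDataUtil.getGroupedPronunciations(dictionaryEntry).
const exampleGroupedPronunciations = [
    {
        dictionary: 'Example Pitch Dictionary',  // hypothetical dictionary title
        pronunciations: [
            {
                terms: ['食べる'],               // converted from a Set to an array before returning
                reading: 'たべる',
                position: 2,                     // pitch accent downstep position
                nasalPositions: [],
                devoicePositions: [],
                tags: [],
                exclusiveTerms: [],              // filled when not all headword terms share this pronunciation
                exclusiveReadings: []            // filled when the entry has multiple readings
            }
        ]
    }
];
```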
@@ -239,26 +239,26 @@ const JapaneseUtil = (() => {
 
         // Mora functions
 
-        isMoraPitchHigh(moraIndex, pitchAccentPosition) {
-            switch (pitchAccentPosition) {
+        isMoraPitchHigh(moraIndex, pitchAccentDownstepPosition) {
+            switch (pitchAccentDownstepPosition) {
                 case 0: return (moraIndex > 0);
                 case 1: return (moraIndex < 1);
-                default: return (moraIndex > 0 && moraIndex < pitchAccentPosition);
+                default: return (moraIndex > 0 && moraIndex < pitchAccentDownstepPosition);
             }
         }
 
-        getPitchCategory(text, pitchAccentPosition, isVerbOrAdjective) {
-            if (pitchAccentPosition === 0) {
+        getPitchCategory(text, pitchAccentDownstepPosition, isVerbOrAdjective) {
+            if (pitchAccentDownstepPosition === 0) {
                 return 'heiban';
             }
             if (isVerbOrAdjective) {
-                return pitchAccentPosition > 0 ? 'kifuku' : null;
+                return pitchAccentDownstepPosition > 0 ? 'kifuku' : null;
             }
-            if (pitchAccentPosition === 1) {
+            if (pitchAccentDownstepPosition === 1) {
                 return 'atamadaka';
             }
-            if (pitchAccentPosition > 1) {
-                return pitchAccentPosition >= this.getKanaMoraCount(text) ? 'odaka' : 'nakadaka';
+            if (pitchAccentDownstepPosition > 1) {
+                return pitchAccentDownstepPosition >= this.getKanaMoraCount(text) ? 'odaka' : 'nakadaka';
             }
             return null;
         }
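The renamed parameter is the downstep position: the number of the last high-pitched mora, with 0 meaning no downstep. A small usage sketch follows (illustrative, not from the diff); it assumes the sandbox `JapaneseUtil` constructor accepts `null` when wanakana is unavailable.

```js
// Illustrative calls; results follow the switch/if logic shown in the diff above.
const jp = new JapaneseUtil(null); // assumption: null is accepted in place of wanakana

jp.isMoraPitchHigh(0, 0); // false: heiban (position 0) starts low...
jp.isMoraPitchHigh(1, 0); // true:  ...and stays high from the second mora onward
jp.isMoraPitchHigh(0, 1); // true:  atamadaka (position 1) starts high...
jp.isMoraPitchHigh(1, 1); // false: ...then drops immediately

jp.getPitchCategory('はし', 1, false);   // 'atamadaka'
jp.getPitchCategory('はし', 2, false);   // 'odaka' (downstep after the final mora)
jp.getPitchCategory('みどり', 2, false); // 'nakadaka' (downstep mid-word)
```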
@@ -811,8 +811,8 @@ function testIsMoraPitchHigh() {
         [[3, 4], true]
     ];
 
-    for (const [[moraIndex, pitchAccentPosition], expected] of data) {
-        const actual = jp.isMoraPitchHigh(moraIndex, pitchAccentPosition);
+    for (const [[moraIndex, pitchAccentDownstepPosition], expected] of data) {
+        const actual = jp.isMoraPitchHigh(moraIndex, pitchAccentDownstepPosition);
         assert.strictEqual(actual, expected);
     }
 }