Merge pull request #424 from toasted-nutbread/dictionary-importer
Dictionary importer
This commit is contained in:
commit
3684a479c5
@ -30,6 +30,7 @@
|
||||
<script src="/bg/js/clipboard-monitor.js"></script>
|
||||
<script src="/bg/js/conditions.js"></script>
|
||||
<script src="/bg/js/database.js"></script>
|
||||
<script src="/bg/js/dictionary-importer.js"></script>
|
||||
<script src="/bg/js/deinflector.js"></script>
|
||||
<script src="/bg/js/dictionary.js"></script>
|
||||
<script src="/bg/js/handlebars.js"></script>
|
||||
|
@ -24,6 +24,8 @@
|
||||
* AudioUriBuilder
|
||||
* BackendApiForwarder
|
||||
* ClipboardMonitor
|
||||
* Database
|
||||
* DictionaryImporter
|
||||
* JsonSchema
|
||||
* Mecab
|
||||
* Translator
|
||||
@ -43,7 +45,9 @@
|
||||
|
||||
class Backend {
|
||||
constructor() {
|
||||
this.translator = new Translator();
|
||||
this.database = new Database();
|
||||
this.dictionaryImporter = new DictionaryImporter();
|
||||
this.translator = new Translator(this.database);
|
||||
this.anki = new AnkiNull();
|
||||
this.mecab = new Mecab();
|
||||
this.clipboardMonitor = new ClipboardMonitor({getClipboard: this._onApiClipboardGet.bind(this)});
|
||||
@ -107,6 +111,7 @@ class Backend {
|
||||
}
|
||||
|
||||
async prepare() {
|
||||
await this.database.prepare();
|
||||
await this.translator.prepare();
|
||||
|
||||
this.optionsSchema = await requestJson(chrome.runtime.getURL('/bg/data/options-schema.json'), 'GET');
|
||||
@ -296,6 +301,10 @@ class Backend {
|
||||
return true;
|
||||
}
|
||||
|
||||
    // Imports a dictionary archive into this backend's database by delegating
    // to the shared DictionaryImporter instance created in the constructor.
    // archiveSource: archive data accepted by the importer (passed through to JSZip).
    // onProgress: optional progress callback forwarded to the importer.
    // details: import options object forwarded to the importer.
    // Returns the importer's result ({result, errors} per DictionaryImporter.import).
    async importDictionary(archiveSource, onProgress, details) {
        return await this.dictionaryImporter.import(this.database, archiveSource, onProgress, details);
    }
|
||||
|
||||
// Message handlers
|
||||
|
||||
_onApiYomichanCoreReady(_params, sender) {
|
||||
|
@ -110,6 +110,10 @@ class Database {
|
||||
this.db = null;
|
||||
}
|
||||
|
||||
isPrepared() {
|
||||
return this.db !== null;
|
||||
}
|
||||
|
||||
async purge() {
|
||||
this._validate();
|
||||
|
||||
@ -322,177 +326,44 @@ class Database {
|
||||
return result;
|
||||
}
|
||||
|
||||
async importDictionary(archiveSource, onProgress, details) {
|
||||
async dictionaryExists(title) {
|
||||
this._validate();
|
||||
const db = this.db;
|
||||
const hasOnProgress = (typeof onProgress === 'function');
|
||||
|
||||
// Read archive
|
||||
const archive = await JSZip.loadAsync(archiveSource);
|
||||
|
||||
// Read and validate index
|
||||
const indexFileName = 'index.json';
|
||||
const indexFile = archive.files[indexFileName];
|
||||
if (!indexFile) {
|
||||
throw new Error('No dictionary index found in archive');
|
||||
const transaction = this.db.transaction(['dictionaries'], 'readonly');
|
||||
const index = transaction.objectStore('dictionaries').index('title');
|
||||
const query = IDBKeyRange.only(title);
|
||||
const count = await Database._getCount(index, query);
|
||||
return count > 0;
|
||||
}
|
||||
|
||||
const index = JSON.parse(await indexFile.async('string'));
|
||||
|
||||
const indexSchema = await this._getSchema('/bg/data/dictionary-index-schema.json');
|
||||
Database._validateJsonSchema(index, indexSchema, indexFileName);
|
||||
|
||||
const dictionaryTitle = index.title;
|
||||
const version = index.format || index.version;
|
||||
|
||||
if (!dictionaryTitle || !index.revision) {
|
||||
throw new Error('Unrecognized dictionary format');
|
||||
}
|
||||
|
||||
// Verify database is not already imported
|
||||
if (await this._dictionaryExists(dictionaryTitle)) {
|
||||
throw new Error('Dictionary is already imported');
|
||||
}
|
||||
|
||||
// Data format converters
|
||||
const convertTermBankEntry = (entry) => {
|
||||
if (version === 1) {
|
||||
const [expression, reading, definitionTags, rules, score, ...glossary] = entry;
|
||||
return {expression, reading, definitionTags, rules, score, glossary};
|
||||
} else {
|
||||
const [expression, reading, definitionTags, rules, score, glossary, sequence, termTags] = entry;
|
||||
return {expression, reading, definitionTags, rules, score, glossary, sequence, termTags};
|
||||
}
|
||||
};
|
||||
|
||||
const convertTermMetaBankEntry = (entry) => {
|
||||
const [expression, mode, data] = entry;
|
||||
return {expression, mode, data};
|
||||
};
|
||||
|
||||
const convertKanjiBankEntry = (entry) => {
|
||||
if (version === 1) {
|
||||
const [character, onyomi, kunyomi, tags, ...meanings] = entry;
|
||||
return {character, onyomi, kunyomi, tags, meanings};
|
||||
} else {
|
||||
const [character, onyomi, kunyomi, tags, meanings, stats] = entry;
|
||||
return {character, onyomi, kunyomi, tags, meanings, stats};
|
||||
}
|
||||
};
|
||||
|
||||
const convertKanjiMetaBankEntry = (entry) => {
|
||||
const [character, mode, data] = entry;
|
||||
return {character, mode, data};
|
||||
};
|
||||
|
||||
const convertTagBankEntry = (entry) => {
|
||||
const [name, category, order, notes, score] = entry;
|
||||
return {name, category, order, notes, score};
|
||||
};
|
||||
|
||||
// Archive file reading
|
||||
const readFileSequence = async (fileNameFormat, convertEntry, schema) => {
|
||||
const results = [];
|
||||
for (let i = 1; true; ++i) {
|
||||
const fileName = fileNameFormat.replace(/\?/, `${i}`);
|
||||
const file = archive.files[fileName];
|
||||
if (!file) { break; }
|
||||
|
||||
const entries = JSON.parse(await file.async('string'));
|
||||
Database._validateJsonSchema(entries, schema, fileName);
|
||||
|
||||
for (let entry of entries) {
|
||||
entry = convertEntry(entry);
|
||||
entry.dictionary = dictionaryTitle;
|
||||
results.push(entry);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
};
|
||||
|
||||
// Load schemas
|
||||
const dataBankSchemaPaths = this.constructor._getDataBankSchemaPaths(version);
|
||||
const dataBankSchemas = await Promise.all(dataBankSchemaPaths.map((path) => this._getSchema(path)));
|
||||
|
||||
// Load data
|
||||
const termList = await readFileSequence('term_bank_?.json', convertTermBankEntry, dataBankSchemas[0]);
|
||||
const termMetaList = await readFileSequence('term_meta_bank_?.json', convertTermMetaBankEntry, dataBankSchemas[1]);
|
||||
const kanjiList = await readFileSequence('kanji_bank_?.json', convertKanjiBankEntry, dataBankSchemas[2]);
|
||||
const kanjiMetaList = await readFileSequence('kanji_meta_bank_?.json', convertKanjiMetaBankEntry, dataBankSchemas[3]);
|
||||
const tagList = await readFileSequence('tag_bank_?.json', convertTagBankEntry, dataBankSchemas[4]);
|
||||
|
||||
// Old tags
|
||||
const indexTagMeta = index.tagMeta;
|
||||
if (typeof indexTagMeta === 'object' && indexTagMeta !== null) {
|
||||
for (const name of Object.keys(indexTagMeta)) {
|
||||
const {category, order, notes, score} = indexTagMeta[name];
|
||||
tagList.push({name, category, order, notes, score});
|
||||
}
|
||||
}
|
||||
|
||||
// Prefix wildcard support
|
||||
const prefixWildcardsSupported = !!details.prefixWildcardsSupported;
|
||||
if (prefixWildcardsSupported) {
|
||||
for (const entry of termList) {
|
||||
entry.expressionReverse = stringReverse(entry.expression);
|
||||
entry.readingReverse = stringReverse(entry.reading);
|
||||
}
|
||||
}
|
||||
|
||||
// Add dictionary
|
||||
const summary = {
|
||||
title: dictionaryTitle,
|
||||
revision: index.revision,
|
||||
sequenced: index.sequenced,
|
||||
version,
|
||||
prefixWildcardsSupported
|
||||
};
|
||||
|
||||
{
|
||||
const transaction = db.transaction(['dictionaries'], 'readwrite');
|
||||
const objectStore = transaction.objectStore('dictionaries');
|
||||
await Database._bulkAdd(objectStore, [summary], 0, 1);
|
||||
}
|
||||
|
||||
// Add data
|
||||
const errors = [];
|
||||
const total = (
|
||||
termList.length +
|
||||
termMetaList.length +
|
||||
kanjiList.length +
|
||||
kanjiMetaList.length +
|
||||
tagList.length
|
||||
);
|
||||
let loadedCount = 0;
|
||||
const maxTransactionLength = 1000;
|
||||
|
||||
const bulkAdd = async (objectStoreName, entries) => {
|
||||
const ii = entries.length;
|
||||
for (let i = 0; i < ii; i += maxTransactionLength) {
|
||||
const count = Math.min(maxTransactionLength, ii - i);
|
||||
|
||||
try {
|
||||
const transaction = db.transaction([objectStoreName], 'readwrite');
|
||||
bulkAdd(objectStoreName, items, start, count) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db.transaction([objectStoreName], 'readwrite');
|
||||
const objectStore = transaction.objectStore(objectStoreName);
|
||||
await Database._bulkAdd(objectStore, entries, i, count);
|
||||
} catch (e) {
|
||||
errors.push(e);
|
||||
|
||||
if (start + count > items.length) {
|
||||
count = items.length - start;
|
||||
}
|
||||
|
||||
loadedCount += count;
|
||||
if (hasOnProgress) {
|
||||
onProgress(total, loadedCount);
|
||||
if (count <= 0) {
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
const end = start + count;
|
||||
let completedCount = 0;
|
||||
const onError = (e) => reject(e);
|
||||
const onSuccess = () => {
|
||||
if (++completedCount >= count) {
|
||||
resolve();
|
||||
}
|
||||
};
|
||||
|
||||
await bulkAdd('terms', termList);
|
||||
await bulkAdd('termMeta', termMetaList);
|
||||
await bulkAdd('kanji', kanjiList);
|
||||
await bulkAdd('kanjiMeta', kanjiMetaList);
|
||||
await bulkAdd('tagMeta', tagList);
|
||||
|
||||
return {result: summary, errors};
|
||||
for (let i = start; i < end; ++i) {
|
||||
const request = objectStore.add(items[i]);
|
||||
request.onerror = onError;
|
||||
request.onsuccess = onSuccess;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Private
|
||||
@ -503,80 +374,6 @@ class Database {
|
||||
}
|
||||
}
|
||||
|
||||
async _getSchema(fileName) {
|
||||
let schemaPromise = this._schemas.get(fileName);
|
||||
if (typeof schemaPromise !== 'undefined') {
|
||||
return schemaPromise;
|
||||
}
|
||||
|
||||
schemaPromise = requestJson(chrome.runtime.getURL(fileName), 'GET');
|
||||
this._schemas.set(fileName, schemaPromise);
|
||||
return schemaPromise;
|
||||
}
|
||||
|
||||
    // Validates `value` against `schema`, rethrowing any validation failure as a
    // user-facing error that names the offending archive file (see _formatSchemaError).
    static _validateJsonSchema(value, schema, fileName) {
        try {
            JsonSchema.validate(value, schema);
        } catch (e) {
            throw Database._formatSchemaError(e, fileName);
        }
    }
|
||||
|
||||
static _formatSchemaError(e, fileName) {
|
||||
const valuePathString = Database._getSchemaErrorPathString(e.info.valuePath, 'dictionary');
|
||||
const schemaPathString = Database._getSchemaErrorPathString(e.info.schemaPath, 'schema');
|
||||
|
||||
const e2 = new Error(`Dictionary has invalid data in '${fileName}' for value '${valuePathString}', validated against '${schemaPathString}': ${e.message}`);
|
||||
e2.data = e;
|
||||
|
||||
return e2;
|
||||
}
|
||||
|
||||
static _getSchemaErrorPathString(infoList, base='') {
|
||||
let result = base;
|
||||
for (const [part] of infoList) {
|
||||
switch (typeof part) {
|
||||
case 'string':
|
||||
if (result.length > 0) {
|
||||
result += '.';
|
||||
}
|
||||
result += part;
|
||||
break;
|
||||
case 'number':
|
||||
result += `[${part}]`;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
static _getDataBankSchemaPaths(version) {
|
||||
const termBank = (
|
||||
version === 1 ?
|
||||
'/bg/data/dictionary-term-bank-v1-schema.json' :
|
||||
'/bg/data/dictionary-term-bank-v3-schema.json'
|
||||
);
|
||||
const termMetaBank = '/bg/data/dictionary-term-meta-bank-v3-schema.json';
|
||||
const kanjiBank = (
|
||||
version === 1 ?
|
||||
'/bg/data/dictionary-kanji-bank-v1-schema.json' :
|
||||
'/bg/data/dictionary-kanji-bank-v3-schema.json'
|
||||
);
|
||||
const kanjiMetaBank = '/bg/data/dictionary-kanji-meta-bank-v3-schema.json';
|
||||
const tagBank = '/bg/data/dictionary-tag-bank-v3-schema.json';
|
||||
|
||||
return [termBank, termMetaBank, kanjiBank, kanjiMetaBank, tagBank];
|
||||
}
|
||||
|
||||
async _dictionaryExists(title) {
|
||||
const db = this.db;
|
||||
const dbCountTransaction = db.transaction(['dictionaries'], 'readonly');
|
||||
const dbIndex = dbCountTransaction.objectStore('dictionaries').index('title');
|
||||
const only = IDBKeyRange.only(title);
|
||||
const count = await Database._getCount(dbIndex, only);
|
||||
return count > 0;
|
||||
}
|
||||
|
||||
async _findGenericBulk(tableName, indexName, indexValueList, dictionaries, createResult) {
|
||||
this._validate();
|
||||
|
||||
@ -760,34 +557,6 @@ class Database {
|
||||
});
|
||||
}
|
||||
|
||||
static _bulkAdd(objectStore, items, start, count) {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (start + count > items.length) {
|
||||
count = items.length - start;
|
||||
}
|
||||
|
||||
if (count <= 0) {
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
const end = start + count;
|
||||
let completedCount = 0;
|
||||
const onError = (e) => reject(e);
|
||||
const onSuccess = () => {
|
||||
if (++completedCount >= count) {
|
||||
resolve();
|
||||
}
|
||||
};
|
||||
|
||||
for (let i = start; i < end; ++i) {
|
||||
const request = objectStore.add(items[i]);
|
||||
request.onerror = onError;
|
||||
request.onsuccess = onSuccess;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
static _open(name, version, onUpgradeNeeded) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const request = window.indexedDB.open(name, version * 10);
|
||||
|
266
ext/bg/js/dictionary-importer.js
Normal file
266
ext/bg/js/dictionary-importer.js
Normal file
@ -0,0 +1,266 @@
|
||||
/*
|
||||
* Copyright (C) 2020 Alex Yatskov <alex@foosoft.net>
|
||||
* Author: Alex Yatskov <alex@foosoft.net>
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
/* global
|
||||
* JSZip
|
||||
* JsonSchema
|
||||
* requestJson
|
||||
*/
|
||||
|
||||
class DictionaryImporter {
    constructor() {
        // Cache of schema-fetch promises, keyed by schema file path.
        this._schemas = new Map();
    }

    /**
     * Imports a dictionary archive (zip) into the given database.
     * @param database Prepared Database instance the entries are written into.
     * @param archiveSource Archive data accepted by JSZip.loadAsync.
     * @param onProgress Optional callback invoked as onProgress(total, loadedCount)
     *     after each inserted chunk.
     * @param details Options object; details.prefixWildcardsSupported enables
     *     storage of reversed expression/reading keys.
     * @returns {result, errors}: the dictionary summary record and any non-fatal
     *     bulk-insert errors collected during the import.
     * @throws If the database is missing or unprepared, the archive lacks a valid
     *     index, the data fails schema validation, or the dictionary already exists.
     */
    async import(database, archiveSource, onProgress, details) {
        if (!database) {
            throw new Error('Invalid database');
        }
        if (!database.isPrepared()) {
            throw new Error('Database is not ready');
        }

        const hasOnProgress = (typeof onProgress === 'function');

        // Read archive
        const archive = await JSZip.loadAsync(archiveSource);

        // Read and validate index
        const indexFileName = 'index.json';
        const indexFile = archive.files[indexFileName];
        if (!indexFile) {
            throw new Error('No dictionary index found in archive');
        }

        const index = JSON.parse(await indexFile.async('string'));

        const indexSchema = await this._getSchema('/bg/data/dictionary-index-schema.json');
        this._validateJsonSchema(index, indexSchema, indexFileName);

        const dictionaryTitle = index.title;
        // Legacy dictionaries declare index.version; newer ones use index.format.
        const version = index.format || index.version;

        if (!dictionaryTitle || !index.revision) {
            throw new Error('Unrecognized dictionary format');
        }

        // Verify database is not already imported
        if (await database.dictionaryExists(dictionaryTitle)) {
            throw new Error('Dictionary is already imported');
        }

        // Data format converters: v1 banks use variadic positional layouts,
        // v3 banks have fixed positions with additional fields.
        const convertTermBankEntry = (entry) => {
            if (version === 1) {
                const [expression, reading, definitionTags, rules, score, ...glossary] = entry;
                return {expression, reading, definitionTags, rules, score, glossary};
            } else {
                const [expression, reading, definitionTags, rules, score, glossary, sequence, termTags] = entry;
                return {expression, reading, definitionTags, rules, score, glossary, sequence, termTags};
            }
        };

        const convertTermMetaBankEntry = (entry) => {
            const [expression, mode, data] = entry;
            return {expression, mode, data};
        };

        const convertKanjiBankEntry = (entry) => {
            if (version === 1) {
                const [character, onyomi, kunyomi, tags, ...meanings] = entry;
                return {character, onyomi, kunyomi, tags, meanings};
            } else {
                const [character, onyomi, kunyomi, tags, meanings, stats] = entry;
                return {character, onyomi, kunyomi, tags, meanings, stats};
            }
        };

        const convertKanjiMetaBankEntry = (entry) => {
            const [character, mode, data] = entry;
            return {character, mode, data};
        };

        const convertTagBankEntry = (entry) => {
            const [name, category, order, notes, score] = entry;
            return {name, category, order, notes, score};
        };

        // Archive file reading: bank files are numbered sequentially from 1;
        // reading stops at the first missing file.
        const readFileSequence = async (fileNameFormat, convertEntry, schema) => {
            const results = [];
            for (let i = 1; true; ++i) {
                const fileName = fileNameFormat.replace(/\?/, `${i}`);
                const file = archive.files[fileName];
                if (!file) { break; }

                const entries = JSON.parse(await file.async('string'));
                this._validateJsonSchema(entries, schema, fileName);

                for (let entry of entries) {
                    entry = convertEntry(entry);
                    entry.dictionary = dictionaryTitle;
                    results.push(entry);
                }
            }
            return results;
        };

        // Load schemas
        const dataBankSchemaPaths = this._getDataBankSchemaPaths(version);
        const dataBankSchemas = await Promise.all(dataBankSchemaPaths.map((path) => this._getSchema(path)));

        // Load data
        const termList = await readFileSequence('term_bank_?.json', convertTermBankEntry, dataBankSchemas[0]);
        const termMetaList = await readFileSequence('term_meta_bank_?.json', convertTermMetaBankEntry, dataBankSchemas[1]);
        const kanjiList = await readFileSequence('kanji_bank_?.json', convertKanjiBankEntry, dataBankSchemas[2]);
        const kanjiMetaList = await readFileSequence('kanji_meta_bank_?.json', convertKanjiMetaBankEntry, dataBankSchemas[3]);
        const tagList = await readFileSequence('tag_bank_?.json', convertTagBankEntry, dataBankSchemas[4]);

        // Old tags: legacy dictionaries may embed tag metadata directly in the index.
        const indexTagMeta = index.tagMeta;
        if (typeof indexTagMeta === 'object' && indexTagMeta !== null) {
            for (const name of Object.keys(indexTagMeta)) {
                const {category, order, notes, score} = indexTagMeta[name];
                tagList.push({name, category, order, notes, score});
            }
        }

        // Prefix wildcard support: store reversed keys so suffix queries can be
        // answered with an ordinary prefix lookup on the reversed field.
        const prefixWildcardsSupported = !!details.prefixWildcardsSupported;
        if (prefixWildcardsSupported) {
            for (const entry of termList) {
                entry.expressionReverse = stringReverse(entry.expression);
                entry.readingReverse = stringReverse(entry.reading);
            }
        }

        // Add dictionary
        const summary = {
            title: dictionaryTitle,
            revision: index.revision,
            sequenced: index.sequenced,
            version,
            prefixWildcardsSupported
        };

        // Fix: await the summary insert. The original fired this promise without
        // awaiting it, so a failed insert became an unhandled rejection and the
        // summary write could race with the data inserts below.
        await database.bulkAdd('dictionaries', [summary], 0, 1);

        // Add data
        const errors = [];
        const total = (
            termList.length +
            termMetaList.length +
            kanjiList.length +
            kanjiMetaList.length +
            tagList.length
        );
        let loadedCount = 0;
        // Insert in chunks to keep individual database transactions small.
        const maxTransactionLength = 1000;

        const bulkAdd = async (objectStoreName, entries) => {
            const ii = entries.length;
            for (let i = 0; i < ii; i += maxTransactionLength) {
                const count = Math.min(maxTransactionLength, ii - i);

                try {
                    await database.bulkAdd(objectStoreName, entries, i, count);
                } catch (e) {
                    // Collect per-chunk failures instead of aborting the whole import.
                    errors.push(e);
                }

                loadedCount += count;
                if (hasOnProgress) {
                    onProgress(total, loadedCount);
                }
            }
        };

        await bulkAdd('terms', termList);
        await bulkAdd('termMeta', termMetaList);
        await bulkAdd('kanji', kanjiList);
        await bulkAdd('kanjiMeta', kanjiMetaList);
        await bulkAdd('tagMeta', tagList);

        return {result: summary, errors};
    }

    // Fetches and caches a JSON schema by extension-relative path. The promise
    // itself is cached so concurrent callers share a single request.
    async _getSchema(fileName) {
        let schemaPromise = this._schemas.get(fileName);
        if (typeof schemaPromise !== 'undefined') {
            return schemaPromise;
        }

        schemaPromise = requestJson(chrome.runtime.getURL(fileName), 'GET');
        this._schemas.set(fileName, schemaPromise);
        return schemaPromise;
    }

    // Validates value against schema, wrapping failures in a user-facing error
    // that names the offending archive file.
    _validateJsonSchema(value, schema, fileName) {
        try {
            JsonSchema.validate(value, schema);
        } catch (e) {
            throw this._formatSchemaError(e, fileName);
        }
    }

    // Wraps a validation error in a readable Error naming the file, the path of
    // the invalid value, and the schema path it was checked against.
    _formatSchemaError(e, fileName) {
        const valuePathString = this._getSchemaErrorPathString(e.info.valuePath, 'dictionary');
        const schemaPathString = this._getSchemaErrorPathString(e.info.schemaPath, 'schema');

        const e2 = new Error(`Dictionary has invalid data in '${fileName}' for value '${valuePathString}', validated against '${schemaPathString}': ${e.message}`);
        // Keep the original validation error attached for debugging.
        e2.data = e;

        return e2;
    }

    // Renders a schema path (list of [part, ...] tuples) as a dotted/bracketed
    // string, e.g. base + [['tags'], [2], ['name']] -> 'base.tags[2].name'.
    _getSchemaErrorPathString(infoList, base='') {
        let result = base;
        for (const [part] of infoList) {
            switch (typeof part) {
                case 'string':
                    if (result.length > 0) {
                        result += '.';
                    }
                    result += part;
                    break;
                case 'number':
                    result += `[${part}]`;
                    break;
            }
        }
        return result;
    }

    // Returns the schema paths for [terms, termMeta, kanji, kanjiMeta, tags];
    // term/kanji banks have distinct v1 layouts, the rest share the v3 schema.
    _getDataBankSchemaPaths(version) {
        const termBank = (
            version === 1 ?
            '/bg/data/dictionary-term-bank-v1-schema.json' :
            '/bg/data/dictionary-term-bank-v3-schema.json'
        );
        const termMetaBank = '/bg/data/dictionary-term-meta-bank-v3-schema.json';
        const kanjiBank = (
            version === 1 ?
            '/bg/data/dictionary-kanji-bank-v1-schema.json' :
            '/bg/data/dictionary-kanji-bank-v3-schema.json'
        );
        const kanjiMetaBank = '/bg/data/dictionary-kanji-meta-bank-v3-schema.json';
        const tagBank = '/bg/data/dictionary-tag-bank-v3-schema.json';

        return [termBank, termMetaBank, kanjiBank, kanjiMetaBank, tagBank];
    }
}
|
@ -17,7 +17,6 @@
|
||||
*/
|
||||
|
||||
/* global
|
||||
* Database
|
||||
* Deinflector
|
||||
* TextSourceMap
|
||||
* dictEnabledSet
|
||||
@ -35,24 +34,17 @@
|
||||
*/
|
||||
|
||||
class Translator {
|
||||
constructor() {
|
||||
this.database = null;
|
||||
constructor(database) {
|
||||
this.database = database;
|
||||
this.deinflector = null;
|
||||
this.tagCache = new Map();
|
||||
}
|
||||
|
||||
async prepare() {
|
||||
if (!this.database) {
|
||||
this.database = new Database();
|
||||
await this.database.prepare();
|
||||
}
|
||||
|
||||
if (!this.deinflector) {
|
||||
const url = chrome.runtime.getURL('/bg/lang/deinflect.json');
|
||||
const reasons = await requestJson(url, 'GET');
|
||||
this.deinflector = new Deinflector(reasons);
|
||||
}
|
||||
}
|
||||
|
||||
async purgeDatabase() {
|
||||
this.tagCache.clear();
|
||||
|
@ -118,7 +118,7 @@ async function utilDatabaseDeleteDictionary(dictionaryName, onProgress) {
|
||||
|
||||
async function utilDatabaseImport(data, onProgress, details) {
|
||||
data = await utilReadFile(data);
|
||||
return utilIsolate(await utilBackend().translator.database.importDictionary(
|
||||
return utilIsolate(await utilBackend().importDictionary(
|
||||
utilBackgroundIsolate(data),
|
||||
utilBackgroundFunctionIsolate(onProgress),
|
||||
utilBackgroundIsolate(details)
|
||||
|
@ -107,8 +107,10 @@ vm.execute([
|
||||
'bg/js/dictionary.js',
|
||||
'mixed/js/core.js',
|
||||
'bg/js/request.js',
|
||||
'bg/js/dictionary-importer.js',
|
||||
'bg/js/database.js'
|
||||
]);
|
||||
const DictionaryImporter = vm.get('DictionaryImporter');
|
||||
const Database = vm.get('Database');
|
||||
|
||||
|
||||
@ -196,6 +198,7 @@ async function testDatabase1() {
|
||||
];
|
||||
|
||||
// Setup database
|
||||
const dictionaryImporter = new DictionaryImporter();
|
||||
const database = new Database();
|
||||
await database.prepare();
|
||||
|
||||
@ -210,7 +213,8 @@ async function testDatabase1() {
|
||||
|
||||
// Import data
|
||||
let progressEvent = false;
|
||||
const {result, errors} = await database.importDictionary(
|
||||
const {result, errors} = await dictionaryImporter.import(
|
||||
database,
|
||||
testDictionarySource,
|
||||
() => {
|
||||
progressEvent = true;
|
||||
@ -848,6 +852,7 @@ async function testDatabase2() {
|
||||
]);
|
||||
|
||||
// Setup database
|
||||
const dictionaryImporter = new DictionaryImporter();
|
||||
const database = new Database();
|
||||
|
||||
// Error: not prepared
|
||||
@ -863,17 +868,17 @@ async function testDatabase2() {
|
||||
await assert.rejects(async () => await database.findTagForTitle('tag', title));
|
||||
await assert.rejects(async () => await database.getDictionaryInfo());
|
||||
await assert.rejects(async () => await database.getDictionaryCounts(titles, true));
|
||||
await assert.rejects(async () => await database.importDictionary(testDictionarySource, () => {}, {}));
|
||||
await assert.rejects(async () => await dictionaryImporter.import(database, testDictionarySource, () => {}, {}));
|
||||
|
||||
await database.prepare();
|
||||
|
||||
// Error: already prepared
|
||||
await assert.rejects(async () => await database.prepare());
|
||||
|
||||
await database.importDictionary(testDictionarySource, () => {}, {});
|
||||
await dictionaryImporter.import(database, testDictionarySource, () => {}, {});
|
||||
|
||||
// Error: dictionary already imported
|
||||
await assert.rejects(async () => await database.importDictionary(testDictionarySource, () => {}, {}));
|
||||
await assert.rejects(async () => await dictionaryImporter.import(database, testDictionarySource, () => {}, {}));
|
||||
|
||||
await database.close();
|
||||
}
|
||||
@ -890,6 +895,7 @@ async function testDatabase3() {
|
||||
];
|
||||
|
||||
// Setup database
|
||||
const dictionaryImporter = new DictionaryImporter();
|
||||
const database = new Database();
|
||||
await database.prepare();
|
||||
|
||||
@ -899,7 +905,7 @@ async function testDatabase3() {
|
||||
|
||||
let error = null;
|
||||
try {
|
||||
await database.importDictionary(testDictionarySource, () => {}, {});
|
||||
await dictionaryImporter.import(database, testDictionarySource, () => {}, {});
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user