use handlebars templates for query parser

This commit is contained in:
siikamiika 2019-10-31 23:56:44 +02:00
parent e6a1b78164
commit 3881457e4e
5 changed files with 99 additions and 33 deletions

View File

@@ -85,6 +85,7 @@ async function apiTextParse(text, optionsContext) {
const results = [];
while (text) {
const term = [];
let [definitions, sourceLength] = await translator.findTerms(text, {}, options);
if (definitions.length > 0) {
definitions = dictTermsSort(definitions);
@@ -98,22 +99,23 @@ async function apiTextParse(text, optionsContext) {
}
const offset = source.length - stemLength;
for (const result of jpDistributeFurigana(
for (const {text, furigana} of jpDistributeFurigana(
source.slice(0, offset === 0 ? source.length : source.length - offset),
reading.slice(0, offset === 0 ? reading.length : source.length + (reading.length - expression.length) - offset)
reading.slice(0, offset === 0 ? reading.length : reading.length - expression.length + stemLength)
)) {
results.push(result);
term.push({text, reading: furigana || ''});
}
if (stemLength !== source.length) {
results.push({text: source.slice(stemLength)});
term.push({text: source.slice(stemLength)});
}
text = text.slice(source.length);
} else {
results.push({text: text[0]});
term.push({text: text[0]});
text = text.slice(1);
}
results.push(term);
}
return results;
}
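
(Not part of the diff, for orientation only.) With the loop above, apiTextParse now returns one array per parsed term rather than a flat list of parts, and each part carries its text plus its furigana as a reading. A minimal sketch of that shape, using 「読む」 with the reading 「よむ」 as an assumed example input:

// values are illustrative, not taken from a real run
const results = [
    [
        {text: '読', reading: 'よ'},
        {text: 'む', reading: ''}
    ]
];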

View File

@@ -59,39 +59,33 @@ class QueryParser {
}
async setText(text) {
this.queryParser.innerHTML = '';
this.search.setSpinnerVisible(true);
const previewTerms = [];
let previewText = text;
while (previewText) {
const tempText = previewText.slice(0, 2);
previewTerms.push([{text: tempText}]);
previewText = previewText.slice(2);
const tempRuby = document.createElement('ruby');
const tempFurigana = document.createElement('rt');
tempRuby.appendChild(document.createTextNode(tempText));
tempRuby.appendChild(tempFurigana);
this.queryParser.appendChild(tempRuby);
}
this.queryParser.innerHTML = await apiTemplateRender('query-parser.html', {
terms: previewTerms,
preview: true
});
const results = await apiTextParse(text, this.search.getOptionsContext());
const textContainer = document.createElement('div');
for (const {text, furigana} of results) {
const rubyElement = document.createElement('ruby');
const furiganaElement = document.createElement('rt');
if (furigana) {
furiganaElement.innerText = furigana;
rubyElement.appendChild(document.createTextNode(text));
rubyElement.appendChild(furiganaElement);
} else {
rubyElement.appendChild(document.createTextNode(text));
rubyElement.appendChild(furiganaElement);
}
textContainer.appendChild(rubyElement);
}
this.queryParser.innerHTML = '';
this.queryParser.appendChild(textContainer);
const content = await apiTemplateRender('query-parser.html', {
terms: results.map((term) => {
return term.map((part) => {
part.raw = !part.text.trim() && (!part.reading || !part.reading.trim());
return part;
});
})
});
this.queryParser.innerHTML = content;
this.search.setSpinnerVisible(false);
}
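
(Not part of the diff.) The raw flag computed above is what the new template keys on: a part is marked raw, and rendered as plain text instead of a ruby element, when its text is only whitespace and it has no reading. A small sketch with assumed sample parts, reusing the exact expression from the code above:

// sample parts are assumptions for illustration
const parts = [{text: '読', reading: 'よ'}, {text: ' ', reading: ''}];
const flagged = parts.map((part) => {
    part.raw = !part.text.trim() && (!part.reading || !part.reading.trim());
    return part;
});
// flagged[0].raw === false (visible text), flagged[1].raw === true (whitespace, no reading)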

View File

@@ -162,6 +162,54 @@ templates['kanji.html'] = template({"1":function(container,depth0,helpers,partia
return fn;
}
,"useDecorators":true,"usePartial":true,"useData":true,"useDepths":true});
templates['query-parser.html'] = template({"1":function(container,depth0,helpers,partials,data) {
var stack1, alias1=depth0 != null ? depth0 : (container.nullContext || {});
return ((stack1 = helpers["if"].call(alias1,(depth0 != null ? depth0.preview : depth0),{"name":"if","hash":{},"fn":container.program(2, data, 0),"inverse":container.program(4, data, 0),"data":data})) != null ? stack1 : "")
+ ((stack1 = helpers.each.call(alias1,depth0,{"name":"each","hash":{},"fn":container.program(6, data, 0),"inverse":container.noop,"data":data})) != null ? stack1 : "")
+ "</span>";
},"2":function(container,depth0,helpers,partials,data) {
return "<span class=\"query-parser-term-preview\">";
},"4":function(container,depth0,helpers,partials,data) {
return "<span class=\"query-parser-term\">";
},"6":function(container,depth0,helpers,partials,data) {
var stack1;
return ((stack1 = container.invokePartial(partials.part,depth0,{"name":"part","data":data,"helpers":helpers,"partials":partials,"decorators":container.decorators})) != null ? stack1 : "");
},"8":function(container,depth0,helpers,partials,data) {
var stack1;
return ((stack1 = helpers["if"].call(depth0 != null ? depth0 : (container.nullContext || {}),(depth0 != null ? depth0.raw : depth0),{"name":"if","hash":{},"fn":container.program(9, data, 0),"inverse":container.program(11, data, 0),"data":data})) != null ? stack1 : "");
},"9":function(container,depth0,helpers,partials,data) {
var helper;
return container.escapeExpression(((helper = (helper = helpers.text || (depth0 != null ? depth0.text : depth0)) != null ? helper : helpers.helperMissing),(typeof helper === "function" ? helper.call(depth0 != null ? depth0 : (container.nullContext || {}),{"name":"text","hash":{},"data":data}) : helper)));
},"11":function(container,depth0,helpers,partials,data) {
var helper, alias1=depth0 != null ? depth0 : (container.nullContext || {}), alias2=helpers.helperMissing, alias3="function", alias4=container.escapeExpression;
return "<ruby>"
+ alias4(((helper = (helper = helpers.text || (depth0 != null ? depth0.text : depth0)) != null ? helper : alias2),(typeof helper === alias3 ? helper.call(alias1,{"name":"text","hash":{},"data":data}) : helper)))
+ "<rt>"
+ alias4(((helper = (helper = helpers.reading || (depth0 != null ? depth0.reading : depth0)) != null ? helper : alias2),(typeof helper === alias3 ? helper.call(alias1,{"name":"reading","hash":{},"data":data}) : helper)))
+ "</rt></ruby>";
},"13":function(container,depth0,helpers,partials,data,blockParams,depths) {
var stack1;
return ((stack1 = container.invokePartial(partials.term,depth0,{"name":"term","hash":{"preview":(depths[1] != null ? depths[1].preview : depths[1])},"data":data,"helpers":helpers,"partials":partials,"decorators":container.decorators})) != null ? stack1 : "");
},"compiler":[7,">= 4.0.0"],"main":function(container,depth0,helpers,partials,data,blockParams,depths) {
var stack1;
return ((stack1 = helpers.each.call(depth0 != null ? depth0 : (container.nullContext || {}),(depth0 != null ? depth0.terms : depth0),{"name":"each","hash":{},"fn":container.program(13, data, 0, blockParams, depths),"inverse":container.noop,"data":data})) != null ? stack1 : "");
},"main_d": function(fn, props, container, depth0, data, blockParams, depths) {
var decorators = container.decorators;
fn = decorators.inline(fn,props,container,{"name":"inline","hash":{},"fn":container.program(1, data, 0, blockParams, depths),"inverse":container.noop,"args":["term"],"data":data}) || fn;
fn = decorators.inline(fn,props,container,{"name":"inline","hash":{},"fn":container.program(8, data, 0, blockParams, depths),"inverse":container.noop,"args":["part"],"data":data}) || fn;
return fn;
}
,"useDecorators":true,"usePartial":true,"useData":true,"useDepths":true});
templates['terms.html'] = template({"1":function(container,depth0,helpers,partials,data) {
var stack1, helper, options, alias1=depth0 != null ? depth0 : (container.nullContext || {}), buffer =

View File

@@ -93,13 +93,12 @@ ol, ul {
font-size: 24px;
}
html:root[data-yomichan-page=search] body {
min-height: 101vh; /* always show scroll bar to avoid scanning problems */
.query-parser-term {
margin-right: 5px;
}
#query-parser {
margin-top: 10px;
font-size: 24px;
html:root[data-yomichan-page=search] body {
min-height: 101vh; /* always show scroll bar to avoid scanning problems */
}

tmpl/query-parser.html Normal file
View File

@@ -0,0 +1,23 @@
{{~#*inline "term"~}}
{{~#if preview~}}
<span class="query-parser-term-preview">
{{~else~}}
<span class="query-parser-term">
{{~/if~}}
{{~#each this~}}
{{> part }}
{{~/each~}}
</span>
{{~/inline~}}
{{~#*inline "part"~}}
{{~#if raw~}}
{{text}}
{{~else~}}
<ruby>{{text}}<rt>{{reading}}</rt></ruby>
{{~/if~}}
{{~/inline~}}
{{~#each terms~}}
{{> term preview=../preview }}
{{~/each~}}
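
(Not part of the diff.) For reference, a rough sketch of what this template should emit for the results shape sketched after apiTextParse above, wrapped as {terms: results} with preview unset; exact whitespace depends on the {{~ ~}} trimming, so treat this as an approximation:

// assumed context: {terms: [[{text: '読', reading: 'よ'}, {text: 'む', reading: ''}]]}
// approximate output:
// <span class="query-parser-term"><ruby>読<rt>よ</rt></ruby><ruby>む<rt></rt></ruby></span>
// with preview: true, the wrapping span instead gets the query-parser-term-preview class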