Upgrade Lunrjs to 2.3.9 and switch to relative_url (#2805)

* Update Lunr to 2.3.9
* Switch from absolute_url to relative_url

This commit is contained in:
parent 923aa95c97
commit 0ec1aa1292

3 changed files with 75 additions and 79 deletions
assets/js/lunr/lunr-store.js (vendored, 4 lines changed)
@@ -42,8 +42,8 @@ var store = [
     {%- endif -%}
     "categories": {{ doc.categories | jsonify }},
     "tags": {{ doc.tags | jsonify }},
-    "url": {{ doc.url | absolute_url | jsonify }},
-    "teaser": {{ teaser | absolute_url | jsonify }}
+    "url": {{ doc.url | relative_url | jsonify }},
+    "teaser": {{ teaser | relative_url | jsonify }}
   }{%- unless forloop.last and l -%},{%- endunless -%}
   {%- endfor -%}
 {%- endfor -%}]
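Note: for context, this is roughly what one generated entry in lunr-store.js looks like after the switch, assuming a hypothetical site baseurl of "/blog" (the title and paths below are invented, not taken from this repository):

// Hypothetical rendered output of the Liquid template above, with baseurl "/blog".
// relative_url keeps only the baseurl-prefixed path, so the stored index entries
// no longer break when the site is served from a different host or protocol.
var store = [{
  "title": "Example post",
  "excerpt": "Lorem ipsum dolor sit amet...",
  "categories": ["jekyll"],
  "tags": ["search"],
  "url": "/blog/2020/01/01/example-post/",
  "teaser": "/blog/assets/images/teaser.png"
}]
// With absolute_url these values would have been full URLs such as
// "https://example.com/blog/2020/01/01/example-post/".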
assets/js/lunr/lunr.js (vendored, 143 lines changed)
@@ -1,6 +1,6 @@
 /**
- * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.5
- * Copyright (C) 2018 Oliver Nightingale
+ * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9
+ * Copyright (C) 2020 Oliver Nightingale
  * @license MIT
  */

@@ -54,10 +54,10 @@ var lunr = function (config) {
   return builder.build()
 }

-lunr.version = "2.3.5"
+lunr.version = "2.3.9"
 /*!
  * lunr.utils
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -177,7 +177,7 @@ lunr.FieldRef.prototype.toString = function () {
 }
 /*!
  * lunr.Set
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -211,8 +211,8 @@ lunr.Set.complete = {
     return other
   },

-  union: function (other) {
-    return other
+  union: function () {
+    return this
   },

   contains: function () {
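Note: the union change above is the behavioral fix in this hunk. A minimal sketch of the difference, using only identifiers shown in the diff (the example set contents are invented):

var docs = new lunr.Set(["a", "b"])

// 2.3.5: lunr.Set.complete.union(docs) returned the argument, silently shrinking
// "the set of everything" down to docs.
// 2.3.9: it returns the complete set itself, the correct union with any other set.
var result = lunr.Set.complete.union(docs)
result.contains("c")  // true in 2.3.9, false with the old behaviour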
@@ -389,7 +389,7 @@ lunr.Token.prototype.clone = function (fn) {
 }
 /*!
  * lunr.tokenizer
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -424,7 +424,7 @@ lunr.tokenizer = function (obj, metadata) {
     })
   }

-  var str = obj.toString().trim().toLowerCase(),
+  var str = obj.toString().toLowerCase(),
       len = str.length,
       tokens = []

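Note: dropping .trim() reads as a simplification rather than a behaviour change, since lunr.tokenizer.separator already treats whitespace as a delimiter and empty slices are skipped. A quick sanity check, assuming the default separator (the expected output is an assumption, not taken from the lunr test suite):

// Leading and trailing whitespace should not produce extra tokens either way.
lunr.tokenizer("  hello world  ").map(String)  // expected: ["hello", "world"]
lunr.tokenizer("hello world").map(String)      // expected: ["hello", "world"]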
@@ -465,7 +465,7 @@ lunr.tokenizer = function (obj, metadata) {
 lunr.tokenizer.separator = /[\s\-]+/
 /*!
  * lunr.Pipeline
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -509,8 +509,8 @@ lunr.Pipeline.registeredFunctions = Object.create(null)
  * or mutate (or add) metadata for a given token.
  *
  * A pipeline function can indicate that the passed token should be discarded by returning
- * null. This token will not be passed to any downstream pipeline functions and will not be
- * added to the index.
+ * null, undefined or an empty string. This token will not be passed to any downstream pipeline
+ * functions and will not be added to the index.
  *
  * Multiple tokens can be returned by returning an array of tokens. Each token will be passed
  * to any downstream pipeline functions and all will returned tokens will be added to the index.
@@ -673,7 +673,7 @@ lunr.Pipeline.prototype.run = function (tokens) {
     for (var j = 0; j < tokens.length; j++) {
       var result = fn(tokens[j], j, tokens)

-      if (result === void 0 || result === '') continue
+      if (result === null || result === void 0 || result === '') continue

       if (Array.isArray(result)) {
         for (var k = 0; k < result.length; k++) {
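Note: together with the doc-comment change above, this makes returning null from a pipeline function an explicit way to discard a token. A small sketch; the minLengthFilter below is a hypothetical custom function, not part of lunr:

// Hypothetical pipeline function: drop tokens shorter than 3 characters.
var minLengthFilter = function (token) {
  if (token.toString().length < 3) {
    return null  // skipped in 2.3.9; 2.3.5 only skipped undefined and ""
  }
  return token
}

lunr.Pipeline.registerFunction(minLengthFilter, 'minLengthFilter')

var idx = lunr(function () {
  this.ref('id')
  this.field('text')
  this.pipeline.add(minLengthFilter)
  this.add({ id: 1, text: 'an ox ate the grass' })  // "ox" should now be dropped cleanly
})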
@@ -732,7 +732,7 @@ lunr.Pipeline.prototype.toJSON = function () {
 }
 /*!
  * lunr.Vector
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -929,7 +929,7 @@ lunr.Vector.prototype.toJSON = function () {
 /* eslint-disable */
 /*!
  * lunr.stemmer
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt
  */

@@ -1151,7 +1151,7 @@ lunr.stemmer = (function(){
 lunr.Pipeline.registerFunction(lunr.stemmer, 'stemmer')
 /*!
  * lunr.stopWordFilter
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -1316,7 +1316,7 @@ lunr.stopWordFilter = lunr.generateStopWordFilter([
 lunr.Pipeline.registerFunction(lunr.stopWordFilter, 'stopWordFilter')
 /*!
  * lunr.trimmer
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -1343,7 +1343,7 @@ lunr.trimmer = function (token) {
 lunr.Pipeline.registerFunction(lunr.trimmer, 'trimmer')
 /*!
  * lunr.TokenSet
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -1469,41 +1469,49 @@ lunr.TokenSet.fromFuzzyString = function (str, editDistance) {
       })
     }

+    if (frame.editsRemaining == 0) {
+      continue
+    }
+
+    // insertion
+    if ("*" in frame.node.edges) {
+      var insertionNode = frame.node.edges["*"]
+    } else {
+      var insertionNode = new lunr.TokenSet
+      frame.node.edges["*"] = insertionNode
+    }
+
+    if (frame.str.length == 0) {
+      insertionNode.final = true
+    }
+
+    stack.push({
+      node: insertionNode,
+      editsRemaining: frame.editsRemaining - 1,
+      str: frame.str
+    })
+
     // deletion
     // can only do a deletion if we have enough edits remaining
     // and if there are characters left to delete in the string
-    if (frame.editsRemaining > 0 && frame.str.length > 1) {
-      var char = frame.str.charAt(1),
-          deletionNode
-
-      if (char in frame.node.edges) {
-        deletionNode = frame.node.edges[char]
-      } else {
-        deletionNode = new lunr.TokenSet
-        frame.node.edges[char] = deletionNode
-      }
-
-      if (frame.str.length <= 2) {
-        deletionNode.final = true
-      } else {
-        stack.push({
-          node: deletionNode,
-          editsRemaining: frame.editsRemaining - 1,
-          str: frame.str.slice(2)
-        })
-      }
+    if (frame.str.length > 1) {
+      stack.push({
+        node: frame.node,
+        editsRemaining: frame.editsRemaining - 1,
+        str: frame.str.slice(1)
+      })
     }

     // deletion
     // just removing the last character from the str
-    if (frame.editsRemaining > 0 && frame.str.length == 1) {
+    if (frame.str.length == 1) {
       frame.node.final = true
     }

     // substitution
     // can only do a substitution if we have enough edits remaining
     // and if there are characters left to substitute
-    if (frame.editsRemaining > 0 && frame.str.length >= 1) {
+    if (frame.str.length >= 1) {
       if ("*" in frame.node.edges) {
         var substitutionNode = frame.node.edges["*"]
       } else {
@@ -1513,40 +1521,19 @@ lunr.TokenSet.fromFuzzyString = function (str, editDistance) {

       if (frame.str.length == 1) {
         substitutionNode.final = true
-      } else {
-        stack.push({
-          node: substitutionNode,
-          editsRemaining: frame.editsRemaining - 1,
-          str: frame.str.slice(1)
-        })
-      }
-    }
-
-    // insertion
-    // can only do insertion if there are edits remaining
-    if (frame.editsRemaining > 0) {
-      if ("*" in frame.node.edges) {
-        var insertionNode = frame.node.edges["*"]
-      } else {
-        var insertionNode = new lunr.TokenSet
-        frame.node.edges["*"] = insertionNode
       }

-      if (frame.str.length == 0) {
-        insertionNode.final = true
-      } else {
-        stack.push({
-          node: insertionNode,
-          editsRemaining: frame.editsRemaining - 1,
-          str: frame.str
-        })
-      }
+      stack.push({
+        node: substitutionNode,
+        editsRemaining: frame.editsRemaining - 1,
+        str: frame.str.slice(1)
+      })
     }

     // transposition
     // can only do a transposition if there are edits remaining
     // and there are enough characters to transpose
-    if (frame.editsRemaining > 0 && frame.str.length > 1) {
+    if (frame.str.length > 1) {
       var charA = frame.str.charAt(0),
           charB = frame.str.charAt(1),
           transposeNode
@@ -1560,13 +1547,13 @@ lunr.TokenSet.fromFuzzyString = function (str, editDistance) {

       if (frame.str.length == 1) {
         transposeNode.final = true
-      } else {
-        stack.push({
-          node: transposeNode,
-          editsRemaining: frame.editsRemaining - 1,
-          str: charA + frame.str.slice(2)
-        })
       }
+
+      stack.push({
+        node: transposeNode,
+        editsRemaining: frame.editsRemaining - 1,
+        str: charA + frame.str.slice(2)
+      })
     }
   }

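Note: the three hunks above restructure lunr.TokenSet.fromFuzzyString: the editsRemaining check is hoisted into a single early continue, and the deletion, substitution and transposition branches now push simpler stack frames instead of building extra nodes. The resulting token set is what backs fuzzy matching, so a quick usage sketch (index contents invented):

var idx = lunr(function () {
  this.ref('id')
  this.field('title')
  this.add({ id: 1, title: 'plant' })
})

// Edit distance 1: "plont" should still match "plant" via a single substitution.
idx.search('plont~1')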
@@ -1619,6 +1606,10 @@ lunr.TokenSet.fromString = function (str) {
  * Converts this TokenSet into an array of strings
  * contained within the TokenSet.
  *
+ * This is not intended to be used on a TokenSet that
+ * contains wildcards, in these cases the results are
+ * undefined and are likely to cause an infinite loop.
+ *
  * @returns {string[]}
  */
 lunr.TokenSet.prototype.toArray = function () {
@@ -1836,7 +1827,7 @@ lunr.TokenSet.Builder.prototype.minimize = function (downTo) {
 }
 /*!
  * lunr.Index
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
@@ -2003,7 +1994,7 @@ lunr.Index.prototype.query = function (fn) {
      */
     var clause = query.clauses[i],
         terms = null,
-        clauseMatches = lunr.Set.complete
+        clauseMatches = lunr.Set.empty

     if (clause.usePipeline) {
       terms = this.pipeline.runString(clause.term, {
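Note: seeding clauseMatches from lunr.Set.empty instead of lunr.Set.complete appears intended so that a required term with no matches can no longer satisfy its clause against every document. A hedged sketch of the expected effect (documents invented):

var idx = lunr(function () {
  this.ref('id')
  this.field('body')
  this.add({ id: 1, body: 'green plant' })
})

// "+zzz" is required but occurs nowhere in the index.
// With clauseMatches starting from lunr.Set.empty this search should come back empty,
// rather than letting the unmatched required clause match everything.
idx.search('+zzz plant')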
@@ -2328,7 +2319,7 @@ lunr.Index.load = function (serializedIndex) {
 }
 /*!
  * lunr.Builder
- * Copyright (C) 2018 Oliver Nightingale
+ * Copyright (C) 2020 Oliver Nightingale
  */

 /**
assets/js/lunr/lunr.min.js (vendored, 7 lines changed)
File diff suppressed because one or more lines are too long