mirror of https://github.com/nunocoracao/blowfish.git
synced 2025-02-02 11:32:30 -06:00
1262 lines · 29 KiB · JavaScript
/**
 * Fuse.js v6.6.2 - Lightweight fuzzy-search (http://fusejs.io)
 *
 * Copyright (c) 2022 Kiro Risk (http://kiro.me)
 * All Rights Reserved. Apache Software License 2.0
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 */

function isArray(value) {
  return !Array.isArray
    ? getTag(value) === '[object Array]'
    : Array.isArray(value)
}

// Adapted from: https://github.com/lodash/lodash/blob/master/.internal/baseToString.js
const INFINITY = 1 / 0;
function baseToString(value) {
  // Exit early for strings to avoid a performance hit in some environments.
  if (typeof value == 'string') {
    return value
  }
  let result = value + '';
  return result == '0' && 1 / value == -INFINITY ? '-0' : result
}

function toString(value) {
  return value == null ? '' : baseToString(value)
}

function isString(value) {
  return typeof value === 'string'
}

function isNumber(value) {
  return typeof value === 'number'
}

// Adapted from: https://github.com/lodash/lodash/blob/master/isBoolean.js
function isBoolean(value) {
  return (
    value === true ||
    value === false ||
    (isObjectLike(value) && getTag(value) == '[object Boolean]')
  )
}

function isObject(value) {
  return typeof value === 'object'
}

// Checks if `value` is object-like.
function isObjectLike(value) {
  return isObject(value) && value !== null
}

function isDefined(value) {
  return value !== undefined && value !== null
}

function isBlank(value) {
  return !value.trim().length
}

// Gets the `toStringTag` of `value`.
// Adapted from: https://github.com/lodash/lodash/blob/master/.internal/getTag.js
function getTag(value) {
  return value == null
    ? value === undefined
      ? '[object Undefined]'
      : '[object Null]'
    : Object.prototype.toString.call(value)
}

const EXTENDED_SEARCH_UNAVAILABLE = 'Extended search is not available';

const LOGICAL_SEARCH_UNAVAILABLE = 'Logical search is not available';

const INCORRECT_INDEX_TYPE = "Incorrect 'index' type";

const LOGICAL_SEARCH_INVALID_QUERY_FOR_KEY = (key) =>
  `Invalid value for key ${key}`;

const PATTERN_LENGTH_TOO_LARGE = (max) =>
  `Pattern length exceeds max of ${max}.`;

const MISSING_KEY_PROPERTY = (name) => `Missing ${name} property in key`;

const INVALID_KEY_WEIGHT_VALUE = (key) =>
  `Property 'weight' in key '${key}' must be a positive integer`;

const hasOwn = Object.prototype.hasOwnProperty;
class KeyStore {
  constructor(keys) {
    this._keys = [];
    this._keyMap = {};

    let totalWeight = 0;

    keys.forEach((key) => {
      let obj = createKey(key);

      this._keys.push(obj);
      this._keyMap[obj.id] = obj;

      totalWeight += obj.weight;
    });

    // Normalize weights so that their sum is equal to 1
    this._keys.forEach((key) => {
      key.weight /= totalWeight;
    });
  }
  get(keyId) {
    return this._keyMap[keyId]
  }
  keys() {
    return this._keys
  }
  toJSON() {
    return JSON.stringify(this._keys)
  }
}
function createKey(key) {
  let path = null;
  let id = null;
  let src = null;
  let weight = 1;
  let getFn = null;

  if (isString(key) || isArray(key)) {
    src = key;
    path = createKeyPath(key);
    id = createKeyId(key);
  } else {
    if (!hasOwn.call(key, 'name')) {
      throw new Error(MISSING_KEY_PROPERTY('name'))
    }

    const name = key.name;
    src = name;

    if (hasOwn.call(key, 'weight')) {
      weight = key.weight;

      if (weight <= 0) {
        throw new Error(INVALID_KEY_WEIGHT_VALUE(name))
      }
    }

    path = createKeyPath(name);
    id = createKeyId(name);
    getFn = key.getFn;
  }

  return { path, id, weight, src, getFn }
}

function createKeyPath(key) {
  return isArray(key) ? key : key.split('.')
}

function createKeyId(key) {
  return isArray(key) ? key.join('.') : key
}

function get(obj, path) {
  let list = [];
  let arr = false;

  const deepGet = (obj, path, index) => {
    if (!isDefined(obj)) {
      return
    }
    if (!path[index]) {
      // If there's no path left, we've arrived at the object we care about.
      list.push(obj);
    } else {
      let key = path[index];

      const value = obj[key];

      if (!isDefined(value)) {
        return
      }

      // If we're at the last value in the path, and if it's a string/number/bool,
      // add it to the list
      if (
        index === path.length - 1 &&
        (isString(value) || isNumber(value) || isBoolean(value))
      ) {
        list.push(toString(value));
      } else if (isArray(value)) {
        arr = true;
        // Search each item in the array.
        for (let i = 0, len = value.length; i < len; i += 1) {
          deepGet(value[i], path, index + 1);
        }
      } else if (path.length) {
        // An object. Recurse further.
        deepGet(value, path, index + 1);
      }
    }
  };

  // Backwards compatibility (since path used to be a string)
  deepGet(obj, isString(path) ? path.split('.') : path, 0);

  return arr ? list : list[0]
}
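
// Illustrative sketch (not part of the library): how the default `get`
// resolves dotted paths and flattens arrays met along the way. The sample
// objects below are hypothetical.
//
//   get({ author: { name: 'Ursula' } }, 'author.name')
//   // -> 'Ursula'
//
//   get({ tags: [{ label: 'js' }, { label: 'search' }] }, ['tags', 'label'])
//   // -> ['js', 'search']  (an array was encountered, so every branch is collected)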

const MatchOptions = {
  // Whether the matches should be included in the result set. When `true`, each record in the result
  // set will include the indices of the matched characters.
  // These can consequently be used for highlighting purposes.
  includeMatches: false,
  // When `true`, the matching function will continue to the end of a search pattern even if
  // a perfect match has already been located in the string.
  findAllMatches: false,
  // Minimum number of characters that must be matched before a result is considered a match
  minMatchCharLength: 1
};

const BasicOptions = {
  // Whether comparisons should be case sensitive.
  isCaseSensitive: false,
  // Whether the score should be included in the result set.
  // A score of `0` indicates a perfect match, while a score of `1` indicates a complete mismatch.
  includeScore: false,
  // List of properties that will be searched. This also supports nested properties.
  keys: [],
  // Whether to sort the result list, by score
  shouldSort: true,
  // Default sort function: sort by ascending score, ascending index
  sortFn: (a, b) =>
    a.score === b.score ? (a.idx < b.idx ? -1 : 1) : a.score < b.score ? -1 : 1
};

const FuzzyOptions = {
  // Approximately where in the text is the pattern expected to be found?
  location: 0,
  // At what point does the match algorithm give up. A threshold of '0.0' requires a perfect match
  // (of both letters and location), a threshold of '1.0' would match anything.
  threshold: 0.6,
  // Determines how close the match must be to the fuzzy location (specified above).
  // An exact letter match which is 'distance' characters away from the fuzzy location
  // would score as a complete mismatch. A distance of '0' requires the match be at
  // the exact location specified. A distance of '1000' would require a perfect match
  // to be within 800 characters of the fuzzy location to be found using a threshold of 0.8.
  distance: 100
};

const AdvancedOptions = {
  // When `true`, it enables the use of unix-like search commands
  useExtendedSearch: false,
  // The get function to use when fetching an object's properties.
  // The default will search nested paths *ie foo.bar.baz*
  getFn: get,
  // When `true`, search will ignore `location` and `distance`, so it won't matter
  // where in the string the pattern appears.
  // More info: https://fusejs.io/concepts/scoring-theory.html#fuzziness-score
  ignoreLocation: false,
  // When `true`, the calculation for the relevance score (used for sorting) will
  // ignore the field-length norm.
  // More info: https://fusejs.io/concepts/scoring-theory.html#field-length-norm
  ignoreFieldNorm: false,
  // The weight to determine how much field length norm affects scoring.
  fieldNormWeight: 1
};

var Config = {
  ...BasicOptions,
  ...MatchOptions,
  ...FuzzyOptions,
  ...AdvancedOptions
};
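
// Illustrative note (not part of the library): `Config` is simply the merge of
// the four option groups above; the `Fuse` constructor then spreads user
// options over it. A hypothetical override might look like:
//
//   const options = { ...Config, keys: ['title'], threshold: 0.3, ignoreLocation: true };
//   // distance, minMatchCharLength, etc. keep their defaults.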

const SPACE = /[^ ]+/g;

// Field-length norm: the shorter the field, the higher the weight.
// Set to 3 decimals to reduce index size.
function norm(weight = 1, mantissa = 3) {
  const cache = new Map();
  const m = Math.pow(10, mantissa);

  return {
    get(value) {
      const numTokens = value.match(SPACE).length;

      if (cache.has(numTokens)) {
        return cache.get(numTokens)
      }

      // Default function is 1/sqrt(x), weight makes that variable
      const norm = 1 / Math.pow(numTokens, 0.5 * weight);

      // In place of `toFixed(mantissa)`, for faster computation
      const n = parseFloat(Math.round(norm * m) / m);

      cache.set(numTokens, n);

      return n
    },
    clear() {
      cache.clear();
    }
  }
}
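
// Worked example (illustrative): with the default weight of 1 and a mantissa
// of 3, a three-token value gets 1 / Math.pow(3, 0.5) ≈ 0.57735, rounded to
// 0.577; a single token gets 1. Shorter fields therefore weigh more.
//
//   const n = norm(1, 3);
//   n.get('foo bar baz'); // -> 0.577
//   n.get('foo');         // -> 1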

class FuseIndex {
  constructor({
    getFn = Config.getFn,
    fieldNormWeight = Config.fieldNormWeight
  } = {}) {
    this.norm = norm(fieldNormWeight, 3);
    this.getFn = getFn;
    this.isCreated = false;

    this.setIndexRecords();
  }
  setSources(docs = []) {
    this.docs = docs;
  }
  setIndexRecords(records = []) {
    this.records = records;
  }
  setKeys(keys = []) {
    this.keys = keys;
    this._keysMap = {};
    keys.forEach((key, idx) => {
      this._keysMap[key.id] = idx;
    });
  }
  create() {
    if (this.isCreated || !this.docs.length) {
      return
    }

    this.isCreated = true;

    // List is Array<String>
    if (isString(this.docs[0])) {
      this.docs.forEach((doc, docIndex) => {
        this._addString(doc, docIndex);
      });
    } else {
      // List is Array<Object>
      this.docs.forEach((doc, docIndex) => {
        this._addObject(doc, docIndex);
      });
    }

    this.norm.clear();
  }
  // Adds a doc to the end of the index
  add(doc) {
    const idx = this.size();

    if (isString(doc)) {
      this._addString(doc, idx);
    } else {
      this._addObject(doc, idx);
    }
  }
  // Removes the doc at the specified index of the index
  removeAt(idx) {
    this.records.splice(idx, 1);

    // Change ref index of every subsequent doc
    for (let i = idx, len = this.size(); i < len; i += 1) {
      this.records[i].i -= 1;
    }
  }
  getValueForItemAtKeyId(item, keyId) {
    return item[this._keysMap[keyId]]
  }
  size() {
    return this.records.length
  }
  _addString(doc, docIndex) {
    if (!isDefined(doc) || isBlank(doc)) {
      return
    }

    let record = {
      v: doc,
      i: docIndex,
      n: this.norm.get(doc)
    };

    this.records.push(record);
  }
  _addObject(doc, docIndex) {
    let record = { i: docIndex, $: {} };

    // Iterate over every key (i.e. path), and fetch the value at that key
    this.keys.forEach((key, keyIndex) => {
      let value = key.getFn ? key.getFn(doc) : this.getFn(doc, key.path);

      if (!isDefined(value)) {
        return
      }

      if (isArray(value)) {
        let subRecords = [];
        const stack = [{ nestedArrIndex: -1, value }];

        while (stack.length) {
          const { nestedArrIndex, value } = stack.pop();

          if (!isDefined(value)) {
            continue
          }

          if (isString(value) && !isBlank(value)) {
            let subRecord = {
              v: value,
              i: nestedArrIndex,
              n: this.norm.get(value)
            };

            subRecords.push(subRecord);
          } else if (isArray(value)) {
            value.forEach((item, k) => {
              stack.push({
                nestedArrIndex: k,
                value: item
              });
            });
          } else ;
        }
        record.$[keyIndex] = subRecords;
      } else if (isString(value) && !isBlank(value)) {
        let subRecord = {
          v: value,
          n: this.norm.get(value)
        };

        record.$[keyIndex] = subRecord;
      }
    });

    this.records.push(record);
  }
  toJSON() {
    return {
      keys: this.keys,
      records: this.records
    }
  }
}

function createIndex(
  keys,
  docs,
  { getFn = Config.getFn, fieldNormWeight = Config.fieldNormWeight } = {}
) {
  const myIndex = new FuseIndex({ getFn, fieldNormWeight });
  myIndex.setKeys(keys.map(createKey));
  myIndex.setSources(docs);
  myIndex.create();
  return myIndex
}
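
// Illustrative sketch (not part of the library): pre-building an index so it
// can be reused or shipped alongside the data. The book list and keys are
// hypothetical.
//
//   const books = [
//     { title: "Old Man's War", author: 'John Scalzi' },
//     { title: 'The Lock Artist', author: 'Steve Hamilton' }
//   ];
//   const index = createIndex(['title', 'author'], books);
//   // Pass it as the third argument to avoid re-indexing:
//   // new Fuse(books, { keys: ['title', 'author'] }, index)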

function parseIndex(
  data,
  { getFn = Config.getFn, fieldNormWeight = Config.fieldNormWeight } = {}
) {
  const { keys, records } = data;
  const myIndex = new FuseIndex({ getFn, fieldNormWeight });
  myIndex.setKeys(keys);
  myIndex.setIndexRecords(records);
  return myIndex
}
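
// Illustrative sketch (not part of the library): round-tripping an index
// through JSON, e.g. to cache it between page loads. Names are hypothetical.
//
//   const serialized = JSON.stringify(createIndex(['title'], books).toJSON());
//   // ...later, or in another process...
//   const restored = parseIndex(JSON.parse(serialized));
//   // new Fuse(books, { keys: ['title'] }, restored)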

function computeScore$1(
  pattern,
  {
    errors = 0,
    currentLocation = 0,
    expectedLocation = 0,
    distance = Config.distance,
    ignoreLocation = Config.ignoreLocation
  } = {}
) {
  const accuracy = errors / pattern.length;

  if (ignoreLocation) {
    return accuracy
  }

  const proximity = Math.abs(expectedLocation - currentLocation);

  if (!distance) {
    // Dodge divide by zero error.
    return proximity ? 1.0 : accuracy
  }

  return accuracy + proximity / distance
}
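
// Worked example (illustrative): with the default distance of 100, a match
// with 1 error in a 5-character pattern, found 30 characters from the
// expected location, scores
//   errors / pattern.length + proximity / distance = 1/5 + 30/100 = 0.5
// Lower is better; with `ignoreLocation: true` only the 0.2 error term remains.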

function convertMaskToIndices(
  matchmask = [],
  minMatchCharLength = Config.minMatchCharLength
) {
  let indices = [];
  let start = -1;
  let end = -1;
  let i = 0;

  for (let len = matchmask.length; i < len; i += 1) {
    let match = matchmask[i];
    if (match && start === -1) {
      start = i;
    } else if (!match && start !== -1) {
      end = i - 1;
      if (end - start + 1 >= minMatchCharLength) {
        indices.push([start, end]);
      }
      start = -1;
    }
  }

  // (i-1 - start) + 1 => i - start
  if (matchmask[i - 1] && i - start >= minMatchCharLength) {
    indices.push([start, i - 1]);
  }

  return indices
}
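
// Illustrative example (not part of the library): a match mask becomes
// [start, end] ranges, dropping runs shorter than `minMatchCharLength`.
//
//   convertMaskToIndices([1, 1, 0, 1, 1, 1], 2); // -> [[0, 1], [3, 5]]
//   convertMaskToIndices([1, 0, 1, 1], 2);       // -> [[2, 3]]  (the lone 1 is dropped)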

// Machine word size
const MAX_BITS = 32;

function search(
  text,
  pattern,
  patternAlphabet,
  {
    location = Config.location,
    distance = Config.distance,
    threshold = Config.threshold,
    findAllMatches = Config.findAllMatches,
    minMatchCharLength = Config.minMatchCharLength,
    includeMatches = Config.includeMatches,
    ignoreLocation = Config.ignoreLocation
  } = {}
) {
  if (pattern.length > MAX_BITS) {
    throw new Error(PATTERN_LENGTH_TOO_LARGE(MAX_BITS))
  }

  const patternLen = pattern.length;
  // Set starting location at the beginning of the text and initialize the alphabet.
  const textLen = text.length;
  // Handle the case when location > text.length
  const expectedLocation = Math.max(0, Math.min(location, textLen));
  // Highest score beyond which we give up.
  let currentThreshold = threshold;
  // Is there a nearby exact match? (speedup)
  let bestLocation = expectedLocation;

  // Performance: only compute matches when the minMatchCharLength > 1
  // OR if `includeMatches` is true.
  const computeMatches = minMatchCharLength > 1 || includeMatches;
  // A mask of the matches, used for building the indices
  const matchMask = computeMatches ? Array(textLen) : [];

  let index;

  // Get all exact matches, here for speed up
  while ((index = text.indexOf(pattern, bestLocation)) > -1) {
    let score = computeScore$1(pattern, {
      currentLocation: index,
      expectedLocation,
      distance,
      ignoreLocation
    });

    currentThreshold = Math.min(score, currentThreshold);
    bestLocation = index + patternLen;

    if (computeMatches) {
      let i = 0;
      while (i < patternLen) {
        matchMask[index + i] = 1;
        i += 1;
      }
    }
  }

  // Reset the best location
  bestLocation = -1;

  let lastBitArr = [];
  let finalScore = 1;
  let binMax = patternLen + textLen;

  const mask = 1 << (patternLen - 1);

  for (let i = 0; i < patternLen; i += 1) {
    // Scan for the best match; each iteration allows for one more error.
    // Run a binary search to determine how far from the match location we can stray
    // at this error level.
    let binMin = 0;
    let binMid = binMax;

    while (binMin < binMid) {
      const score = computeScore$1(pattern, {
        errors: i,
        currentLocation: expectedLocation + binMid,
        expectedLocation,
        distance,
        ignoreLocation
      });

      if (score <= currentThreshold) {
        binMin = binMid;
      } else {
        binMax = binMid;
      }

      binMid = Math.floor((binMax - binMin) / 2 + binMin);
    }

    // Use the result from this iteration as the maximum for the next.
    binMax = binMid;

    let start = Math.max(1, expectedLocation - binMid + 1);
    let finish = findAllMatches
      ? textLen
      : Math.min(expectedLocation + binMid, textLen) + patternLen;

    // Initialize the bit array
    let bitArr = Array(finish + 2);

    bitArr[finish + 1] = (1 << i) - 1;

    for (let j = finish; j >= start; j -= 1) {
      let currentLocation = j - 1;
      let charMatch = patternAlphabet[text.charAt(currentLocation)];

      if (computeMatches) {
        // Speed up: quick bool to int conversion (i.e. `charMatch ? 1 : 0`)
        matchMask[currentLocation] = +!!charMatch;
      }

      // First pass: exact match
      bitArr[j] = ((bitArr[j + 1] << 1) | 1) & charMatch;

      // Subsequent passes: fuzzy match
      if (i) {
        bitArr[j] |=
          ((lastBitArr[j + 1] | lastBitArr[j]) << 1) | 1 | lastBitArr[j + 1];
      }

      if (bitArr[j] & mask) {
        finalScore = computeScore$1(pattern, {
          errors: i,
          currentLocation,
          expectedLocation,
          distance,
          ignoreLocation
        });

        // This match will almost certainly be better than any existing match.
        // But check anyway.
        if (finalScore <= currentThreshold) {
          // Indeed it is
          currentThreshold = finalScore;
          bestLocation = currentLocation;

          // Already passed `loc`, downhill from here on in.
          if (bestLocation <= expectedLocation) {
            break
          }

          // When passing `bestLocation`, don't exceed our current distance from `expectedLocation`.
          start = Math.max(1, 2 * expectedLocation - bestLocation);
        }
      }
    }

    // No hope for a (better) match at greater error levels.
    const score = computeScore$1(pattern, {
      errors: i + 1,
      currentLocation: expectedLocation,
      expectedLocation,
      distance,
      ignoreLocation
    });

    if (score > currentThreshold) {
      break
    }

    lastBitArr = bitArr;
  }

  const result = {
    isMatch: bestLocation >= 0,
    // Count exact matches (those with a score of 0) to be "almost" exact
    score: Math.max(0.001, finalScore)
  };

  if (computeMatches) {
    const indices = convertMaskToIndices(matchMask, minMatchCharLength);
    if (!indices.length) {
      result.isMatch = false;
    } else if (includeMatches) {
      result.indices = indices;
    }
  }

  return result
}

function createPatternAlphabet(pattern) {
  let mask = {};

  for (let i = 0, len = pattern.length; i < len; i += 1) {
    const char = pattern.charAt(i);
    mask[char] = (mask[char] || 0) | (1 << (len - i - 1));
  }

  return mask
}
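
// Illustrative example (not part of the library): each pattern character is
// encoded as a bitmask with bit (len - i - 1) set for position i. For the
// hypothetical pattern 'abca' (len 4):
//
//   createPatternAlphabet('abca');
//   // -> { a: 9 (0b1001), b: 4 (0b0100), c: 2 (0b0010) }
//
// `search` above shifts these masks over the text (the bitap algorithm) to
// locate the best fuzzy match within a MAX_BITS-sized chunk.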

class BitapSearch {
  constructor(
    pattern,
    {
      location = Config.location,
      threshold = Config.threshold,
      distance = Config.distance,
      includeMatches = Config.includeMatches,
      findAllMatches = Config.findAllMatches,
      minMatchCharLength = Config.minMatchCharLength,
      isCaseSensitive = Config.isCaseSensitive,
      ignoreLocation = Config.ignoreLocation
    } = {}
  ) {
    this.options = {
      location,
      threshold,
      distance,
      includeMatches,
      findAllMatches,
      minMatchCharLength,
      isCaseSensitive,
      ignoreLocation
    };

    this.pattern = isCaseSensitive ? pattern : pattern.toLowerCase();

    this.chunks = [];

    if (!this.pattern.length) {
      return
    }

    const addChunk = (pattern, startIndex) => {
      this.chunks.push({
        pattern,
        alphabet: createPatternAlphabet(pattern),
        startIndex
      });
    };

    const len = this.pattern.length;

    if (len > MAX_BITS) {
      let i = 0;
      const remainder = len % MAX_BITS;
      const end = len - remainder;

      while (i < end) {
        addChunk(this.pattern.substr(i, MAX_BITS), i);
        i += MAX_BITS;
      }

      if (remainder) {
        const startIndex = len - MAX_BITS;
        addChunk(this.pattern.substr(startIndex), startIndex);
      }
    } else {
      addChunk(this.pattern, 0);
    }
  }

  searchIn(text) {
    const { isCaseSensitive, includeMatches } = this.options;

    if (!isCaseSensitive) {
      text = text.toLowerCase();
    }

    // Exact match
    if (this.pattern === text) {
      let result = {
        isMatch: true,
        score: 0
      };

      if (includeMatches) {
        result.indices = [[0, text.length - 1]];
      }

      return result
    }

    // Otherwise, use Bitap algorithm
    const {
      location,
      distance,
      threshold,
      findAllMatches,
      minMatchCharLength,
      ignoreLocation
    } = this.options;

    let allIndices = [];
    let totalScore = 0;
    let hasMatches = false;

    this.chunks.forEach(({ pattern, alphabet, startIndex }) => {
      const { isMatch, score, indices } = search(text, pattern, alphabet, {
        location: location + startIndex,
        distance,
        threshold,
        findAllMatches,
        minMatchCharLength,
        includeMatches,
        ignoreLocation
      });

      if (isMatch) {
        hasMatches = true;
      }

      totalScore += score;

      if (isMatch && indices) {
        allIndices = [...allIndices, ...indices];
      }
    });

    let result = {
      isMatch: hasMatches,
      score: hasMatches ? totalScore / this.chunks.length : 1
    };

    if (hasMatches && includeMatches) {
      result.indices = allIndices;
    }

    return result
  }
}
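
// Illustrative sketch (not part of the library): BitapSearch is what Fuse uses
// internally for plain string patterns. The pattern, text and options are
// hypothetical.
//
//   const searcher = new BitapSearch('hllo', { includeMatches: true, threshold: 0.4 });
//   const { isMatch, score, indices } = searcher.searchIn('hello world');
//   // isMatch -> true; score -> roughly 0.25 (one error in a four-character
//   // pattern plus a small location penalty); indices -> the matched ranges.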

const registeredSearchers = [];

function createSearcher(pattern, options) {
  for (let i = 0, len = registeredSearchers.length; i < len; i += 1) {
    let searcherClass = registeredSearchers[i];
    if (searcherClass.condition(pattern, options)) {
      return new searcherClass(pattern, options)
    }
  }

  return new BitapSearch(pattern, options)
}

const LogicalOperator = {
  AND: '$and',
  OR: '$or'
};

const KeyType = {
  PATH: '$path',
  PATTERN: '$val'
};

const isExpression = (query) =>
  !!(query[LogicalOperator.AND] || query[LogicalOperator.OR]);

const isPath = (query) => !!query[KeyType.PATH];

const isLeaf = (query) =>
  !isArray(query) && isObject(query) && !isExpression(query);

const convertToExplicit = (query) => ({
  [LogicalOperator.AND]: Object.keys(query).map((key) => ({
    [key]: query[key]
  }))
});

// When `auto` is `true`, the parse function will infer, initialize, and add
// the appropriate `Searcher` instance
function parse(query, options, { auto = true } = {}) {
  const next = (query) => {
    let keys = Object.keys(query);

    const isQueryPath = isPath(query);

    if (!isQueryPath && keys.length > 1 && !isExpression(query)) {
      return next(convertToExplicit(query))
    }

    if (isLeaf(query)) {
      const key = isQueryPath ? query[KeyType.PATH] : keys[0];

      const pattern = isQueryPath ? query[KeyType.PATTERN] : query[key];

      if (!isString(pattern)) {
        throw new Error(LOGICAL_SEARCH_INVALID_QUERY_FOR_KEY(key))
      }

      const obj = {
        keyId: createKeyId(key),
        pattern
      };

      if (auto) {
        obj.searcher = createSearcher(pattern, options);
      }

      return obj
    }

    let node = {
      children: [],
      operator: keys[0]
    };

    keys.forEach((key) => {
      const value = query[key];

      if (isArray(value)) {
        value.forEach((item) => {
          node.children.push(next(item));
        });
      }
    });

    return node
  };

  if (!isExpression(query)) {
    query = convertToExplicit(query);
  }

  return next(query)
}
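
// Illustrative sketch (not part of the library): `parse` turns a logical query
// into a tree of { operator, children } nodes with one leaf per key/pattern.
// The query below is hypothetical.
//
//   parse({ $and: [{ title: 'old war' }, { author: 'scalzi' }] }, Config)
//   // -> { operator: '$and',
//   //      children: [ { keyId: 'title',  pattern: 'old war', searcher: <BitapSearch> },
//   //                  { keyId: 'author', pattern: 'scalzi',  searcher: <BitapSearch> } ] }
//
// Note: this bundle exposes `Fuse.parseQuery`, but `_searchLogical` below
// throws LOGICAL_SEARCH_UNAVAILABLE, so logical queries cannot actually be run here.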

// Practical scoring function
function computeScore(
  results,
  { ignoreFieldNorm = Config.ignoreFieldNorm }
) {
  results.forEach((result) => {
    let totalScore = 1;

    result.matches.forEach(({ key, norm, score }) => {
      const weight = key ? key.weight : null;

      totalScore *= Math.pow(
        score === 0 && weight ? Number.EPSILON : score,
        (weight || 1) * (ignoreFieldNorm ? 1 : norm)
      );
    });

    result.score = totalScore;
  });
}

function transformMatches(result, data) {
  const matches = result.matches;
  data.matches = [];

  if (!isDefined(matches)) {
    return
  }

  matches.forEach((match) => {
    if (!isDefined(match.indices) || !match.indices.length) {
      return
    }

    const { indices, value } = match;

    let obj = {
      indices,
      value
    };

    if (match.key) {
      obj.key = match.key.src;
    }

    if (match.idx > -1) {
      obj.refIndex = match.idx;
    }

    data.matches.push(obj);
  });
}

function transformScore(result, data) {
  data.score = result.score;
}

function format(
  results,
  docs,
  {
    includeMatches = Config.includeMatches,
    includeScore = Config.includeScore
  } = {}
) {
  const transformers = [];

  if (includeMatches) transformers.push(transformMatches);
  if (includeScore) transformers.push(transformScore);

  return results.map((result) => {
    const { idx } = result;

    const data = {
      item: docs[idx],
      refIndex: idx
    };

    if (transformers.length) {
      transformers.forEach((transformer) => {
        transformer(result, data);
      });
    }

    return data
  })
}
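
// Illustrative example (not part of the library): shape of a formatted result
// when both `includeScore` and `includeMatches` are enabled (values made up).
//
//   {
//     item: { title: "Old Man's War", author: 'John Scalzi' },
//     refIndex: 0,
//     score: 0.023,
//     matches: [{ indices: [[0, 2]], value: "Old Man's War", key: 'title' }]
//   }
//
// A match also carries `refIndex` when its value came from an array field.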

class Fuse {
  constructor(docs, options = {}, index) {
    this.options = { ...Config, ...options };

    if (
      this.options.useExtendedSearch &&
      !false
    ) {
      throw new Error(EXTENDED_SEARCH_UNAVAILABLE)
    }

    this._keyStore = new KeyStore(this.options.keys);

    this.setCollection(docs, index);
  }

  setCollection(docs, index) {
    this._docs = docs;

    if (index && !(index instanceof FuseIndex)) {
      throw new Error(INCORRECT_INDEX_TYPE)
    }

    this._myIndex =
      index ||
      createIndex(this.options.keys, this._docs, {
        getFn: this.options.getFn,
        fieldNormWeight: this.options.fieldNormWeight
      });
  }

  add(doc) {
    if (!isDefined(doc)) {
      return
    }

    this._docs.push(doc);
    this._myIndex.add(doc);
  }

  remove(predicate = (/* doc, idx */) => false) {
    const results = [];

    for (let i = 0, len = this._docs.length; i < len; i += 1) {
      const doc = this._docs[i];
      if (predicate(doc, i)) {
        this.removeAt(i);
        i -= 1;
        len -= 1;

        results.push(doc);
      }
    }

    return results
  }

  removeAt(idx) {
    this._docs.splice(idx, 1);
    this._myIndex.removeAt(idx);
  }

  getIndex() {
    return this._myIndex
  }

  search(query, { limit = -1 } = {}) {
    const {
      includeMatches,
      includeScore,
      shouldSort,
      sortFn,
      ignoreFieldNorm
    } = this.options;

    let results = isString(query)
      ? isString(this._docs[0])
        ? this._searchStringList(query)
        : this._searchObjectList(query)
      : this._searchLogical(query);

    computeScore(results, { ignoreFieldNorm });

    if (shouldSort) {
      results.sort(sortFn);
    }

    if (isNumber(limit) && limit > -1) {
      results = results.slice(0, limit);
    }

    return format(results, this._docs, {
      includeMatches,
      includeScore
    })
  }

  _searchStringList(query) {
    const searcher = createSearcher(query, this.options);
    const { records } = this._myIndex;
    const results = [];

    // Iterate over every string in the index
    records.forEach(({ v: text, i: idx, n: norm }) => {
      if (!isDefined(text)) {
        return
      }

      const { isMatch, score, indices } = searcher.searchIn(text);

      if (isMatch) {
        results.push({
          item: text,
          idx,
          matches: [{ score, value: text, norm, indices }]
        });
      }
    });

    return results
  }

  _searchLogical(query) {
    {
      throw new Error(LOGICAL_SEARCH_UNAVAILABLE)
    }
  }

  _searchObjectList(query) {
    const searcher = createSearcher(query, this.options);
    const { keys, records } = this._myIndex;
    const results = [];

    // List is Array<Object>
    records.forEach(({ $: item, i: idx }) => {
      if (!isDefined(item)) {
        return
      }

      let matches = [];

      // Iterate over every key (i.e. path), and fetch the value at that key
      keys.forEach((key, keyIndex) => {
        matches.push(
          ...this._findMatches({
            key,
            value: item[keyIndex],
            searcher
          })
        );
      });

      if (matches.length) {
        results.push({
          idx,
          item,
          matches
        });
      }
    });

    return results
  }
  _findMatches({ key, value, searcher }) {
    if (!isDefined(value)) {
      return []
    }

    let matches = [];

    if (isArray(value)) {
      value.forEach(({ v: text, i: idx, n: norm }) => {
        if (!isDefined(text)) {
          return
        }

        const { isMatch, score, indices } = searcher.searchIn(text);

        if (isMatch) {
          matches.push({
            score,
            key,
            value: text,
            idx,
            norm,
            indices
          });
        }
      });
    } else {
      const { v: text, n: norm } = value;

      const { isMatch, score, indices } = searcher.searchIn(text);

      if (isMatch) {
        matches.push({ score, key, value: text, norm, indices });
      }
    }

    return matches
  }
}

Fuse.version = '6.6.2';
Fuse.createIndex = createIndex;
Fuse.parseIndex = parseIndex;
Fuse.config = Config;

{
  Fuse.parseQuery = parse;
}
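
// Illustrative usage sketch (not part of the library). The data, option values
// and import path are hypothetical; the API shown (constructor, search) is the
// one defined above.
//
//   import Fuse from './fuse.js';
//
//   const books = [
//     { title: "Old Man's War", author: 'John Scalzi' },
//     { title: 'The Lock Artist', author: 'Steve Hamilton' }
//   ];
//
//   const fuse = new Fuse(books, {
//     keys: ['title', { name: 'author', weight: 2 }],
//     includeScore: true,
//     threshold: 0.4
//   });
//
//   fuse.search('lock', { limit: 5 });
//   // -> [{ item: { title: 'The Lock Artist', author: 'Steve Hamilton' },
//   //        refIndex: 1, score: /* close to 0 */ }]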
export { Fuse as default };