Manually tokenize search terms and emoji indexes

release
Etienne Lemay 2016-07-07 16:29:04 -04:00
parent 5e0dc3bf63
commit 19eea428b8
1 changed file with 17 additions and 3 deletions


@@ -11,27 +11,41 @@ export default class Search extends React.Component {
   buildIndex() {
     this.index = lunr(function() {
       this.pipeline.reset()
       this.field('short_name', { boost: 2 })
       this.field('name')
-      this.ref('short_name')
+      this.ref('id')
     })
     for (let emoji in data.emojis) {
       let emojiData = data.emojis[emoji],
           { short_name, name } = emojiData
-      this.index.add({ short_name, name })
+      this.index.add({
+        id: short_name,
+        short_name: this.tokenize(short_name),
+        name: this.tokenize(name),
+      })
     }
   }
+  tokenize (string) {
+    if (['-', '-1', '+', '+1'].indexOf(string) == 0) {
+      return string.split('')
+    }
+    return string.split(/[-|_|\s]+/)
+  }
   handleChange() {
     var { input } = this.refs,
         value = input.value,
         results = null
     if (value.length) {
-      results = this.index.search(value).map((result) =>
+      results = this.index.search(this.tokenize(value)).map((result) =>
         result.ref
       )
     }
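
For context, a minimal standalone sketch of the indexing strategy this commit introduces. It assumes lunr ~0.7, where an array passed as a document field or as a search query is kept as pre-split tokens (only lowercased) rather than re-tokenized. The sample emoji data is illustrative, and the tokenizer here is a simplified take on the committed one (membership test written as indexOf(...) != -1, split on hyphens, underscores and whitespace); it is not the repository code.

import lunr from 'lunr'

// Illustrative stand-in for data.emojis
const emojis = {
  thumbsup: { short_name: '+1', name: 'Thumbs Up Sign' },
  blue_car: { short_name: 'blue_car', name: 'Automobile' },
}

// Keep symbol-only short names ('-1', '+1', ...) as individual characters,
// split everything else on hyphens, underscores and whitespace.
function tokenize(string) {
  if (['-', '-1', '+', '+1'].indexOf(string) != -1) {
    return string.split('')
  }
  return string.split(/[-_\s]+/)
}

const index = lunr(function() {
  this.pipeline.reset()                   // no stemming or stop-word filtering
  this.field('short_name', { boost: 2 })  // short names weigh more than full names
  this.field('name')
  this.ref('id')
})

for (let id in emojis) {
  const { short_name, name } = emojis[id]
  index.add({
    id,
    short_name: tokenize(short_name),     // pre-tokenized arrays, not raw strings
    name: tokenize(name),
  })
}

// The query goes through the same tokenizer, so 'blue-car', 'blue_car' and
// 'blue car' all produce the same tokens before hitting the index.
const ids = index.search(tokenize('blue car')).map((result) => result.ref)
console.log(ids)

Running both the indexed fields and the query through the same tokenize helper keeps the two sides consistent once the default lunr pipeline has been reset.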