fix: 降低词云阈值 (lower the word cloud threshold)

ikechan8370 2023-11-07 22:13:21 +08:00
parent a45be2e211
commit 56a893734c


@@ -95,15 +95,15 @@ export class Tokenizer {
           .join('').trim()
       )
       .map(c => {
-        let length = c.length
-        let threshold = 10
-        if (length < 100 && length > 50) {
-          threshold = 6
-        } else if (length <= 50 && length > 25) {
-          threshold = 3
-        } else if (length <= 25) {
-          threshold = 2
-        }
+        // let length = c.length
+        let threshold = 2
+        // if (length < 100 && length > 50) {
+        //   threshold = 6
+        // } else if (length <= 50 && length > 25) {
+        //   threshold = 3
+        // } else if (length <= 25) {
+        //   threshold = 2
+        // }
         return nodejieba.extract(c, threshold)
       })
       .reduce((acc, curr) => acc.concat(curr), [])
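
The "threshold" here is the topN argument passed to nodejieba's keyword extractor, so the change caps every text chunk at 2 extracted keywords instead of scaling the cap (2-10) with chunk length. A minimal sketch of the effect, assuming the standard nodejieba.extract(text, topN) API, which returns an array of { word, weight } pairs ranked by TF-IDF weight; the sample text and variable names are illustrative, not taken from the repository:

const nodejieba = require('nodejieba')

// Illustrative chunk of chat text.
const chunk = '今天群里大家都在聊词云，顺便讨论了分词和关键词提取'

// Before this commit: long chunks could contribute up to 10 keywords each.
// After this commit: every chunk contributes at most 2 keywords, so only the
// highest-weight terms feed into the word cloud.
const keywords = nodejieba.extract(chunk, 2)
console.log(keywords)
// prints at most two { word, weight } entries, ranked by TF-IDF weight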