//  Copyright (c) 2014 Couchbase, Inc.
//
//  Licensed under the Apache License, Version 2.0 (the "License");
//  you may not use this file except in compliance with the License.
//  You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
//  Unless required by applicable law or agreed to in writing, software
//  distributed under the License is distributed on an "AS IS" BASIS,
//  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//  See the License for the specific language governing permissions and
//  limitations under the License.

package scorer
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"math"
|
2019-02-18 00:50:26 +00:00
|
|
|
"reflect"
|
2017-01-25 02:43:02 +00:00
|
|
|
|
|
|
|
"github.com/blevesearch/bleve/index"
|
|
|
|
"github.com/blevesearch/bleve/search"
|
2019-02-18 00:50:26 +00:00
|
|
|
"github.com/blevesearch/bleve/size"
|
2017-01-25 02:43:02 +00:00
|
|
|
)
|
|
|
|
|
2019-02-18 00:50:26 +00:00
|
|
|
var reflectStaticSizeTermQueryScorer int
|
|
|
|
|
|
|
|
func init() {
|
|
|
|
var tqs TermQueryScorer
|
|
|
|
reflectStaticSizeTermQueryScorer = int(reflect.TypeOf(tqs).Size())
|
|
|
|
}
|
|
|
|
|
2017-01-25 02:43:02 +00:00
|
|
|
type TermQueryScorer struct {
|
2019-02-18 00:50:26 +00:00
|
|
|
queryTerm string
|
2017-01-25 02:43:02 +00:00
|
|
|
queryField string
|
|
|
|
queryBoost float64
|
|
|
|
docTerm uint64
|
|
|
|
docTotal uint64
|
|
|
|
idf float64
|
2017-09-16 20:16:21 +00:00
|
|
|
options search.SearcherOptions
|
2017-01-25 02:43:02 +00:00
|
|
|
idfExplanation *search.Explanation
|
2019-11-27 09:23:33 +00:00
|
|
|
includeScore bool
|
2017-01-25 02:43:02 +00:00
|
|
|
queryNorm float64
|
|
|
|
queryWeight float64
|
|
|
|
queryWeightExplanation *search.Explanation
|
|
|
|
}
|
|
|
|
|
2019-02-18 00:50:26 +00:00
|
|
|
func (s *TermQueryScorer) Size() int {
|
|
|
|
sizeInBytes := reflectStaticSizeTermQueryScorer + size.SizeOfPtr +
|
|
|
|
len(s.queryTerm) + len(s.queryField)
|
|
|
|
|
|
|
|
if s.idfExplanation != nil {
|
|
|
|
sizeInBytes += s.idfExplanation.Size()
|
|
|
|
}
|
|
|
|
|
|
|
|
if s.queryWeightExplanation != nil {
|
|
|
|
sizeInBytes += s.queryWeightExplanation.Size()
|
|
|
|
}
|
|
|
|
|
|
|
|
return sizeInBytes
|
|
|
|
}
|
|
|
|
|
2017-09-16 20:16:21 +00:00
|
|
|
func NewTermQueryScorer(queryTerm []byte, queryField string, queryBoost float64, docTotal, docTerm uint64, options search.SearcherOptions) *TermQueryScorer {
|
2017-01-25 02:43:02 +00:00
|
|
|
rv := TermQueryScorer{
|
2019-11-27 09:23:33 +00:00
|
|
|
queryTerm: string(queryTerm),
|
|
|
|
queryField: queryField,
|
|
|
|
queryBoost: queryBoost,
|
|
|
|
docTerm: docTerm,
|
|
|
|
docTotal: docTotal,
|
|
|
|
idf: 1.0 + math.Log(float64(docTotal)/float64(docTerm+1.0)),
|
|
|
|
options: options,
|
|
|
|
queryWeight: 1.0,
|
|
|
|
includeScore: options.Score != "none",
|
2017-01-25 02:43:02 +00:00
|
|
|
}
|
|
|
|
|
2017-09-16 20:16:21 +00:00
|
|
|
if options.Explain {
|
2017-01-25 02:43:02 +00:00
|
|
|
rv.idfExplanation = &search.Explanation{
|
|
|
|
Value: rv.idf,
|
|
|
|
Message: fmt.Sprintf("idf(docFreq=%d, maxDocs=%d)", docTerm, docTotal),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return &rv
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *TermQueryScorer) Weight() float64 {
|
|
|
|
sum := s.queryBoost * s.idf
|
|
|
|
return sum * sum
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *TermQueryScorer) SetQueryNorm(qnorm float64) {
|
|
|
|
s.queryNorm = qnorm
|
|
|
|
|
|
|
|
// update the query weight
|
|
|
|
s.queryWeight = s.queryBoost * s.idf * s.queryNorm
|
|
|
|
|
2017-09-16 20:16:21 +00:00
|
|
|
if s.options.Explain {
|
2017-01-25 02:43:02 +00:00
|
|
|
childrenExplanations := make([]*search.Explanation, 3)
|
|
|
|
childrenExplanations[0] = &search.Explanation{
|
|
|
|
Value: s.queryBoost,
|
|
|
|
Message: "boost",
|
|
|
|
}
|
|
|
|
childrenExplanations[1] = s.idfExplanation
|
|
|
|
childrenExplanations[2] = &search.Explanation{
|
|
|
|
Value: s.queryNorm,
|
|
|
|
Message: "queryNorm",
|
|
|
|
}
|
|
|
|
s.queryWeightExplanation = &search.Explanation{
|
|
|
|
Value: s.queryWeight,
|
2019-02-18 00:50:26 +00:00
|
|
|
Message: fmt.Sprintf("queryWeight(%s:%s^%f), product of:", s.queryField, s.queryTerm, s.queryBoost),
|
2017-01-25 02:43:02 +00:00
|
|
|
Children: childrenExplanations,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *TermQueryScorer) Score(ctx *search.SearchContext, termMatch *index.TermFieldDoc) *search.DocumentMatch {
|
2019-11-27 09:23:33 +00:00
|
|
|
rv := ctx.DocumentMatchPool.Get()
|
|
|
|
// perform any score computations only when needed
|
|
|
|
if s.includeScore || s.options.Explain {
|
|
|
|
var scoreExplanation *search.Explanation
|
|
|
|
var tf float64
|
|
|
|
if termMatch.Freq < MaxSqrtCache {
|
|
|
|
tf = SqrtCache[int(termMatch.Freq)]
|
|
|
|
} else {
|
|
|
|
tf = math.Sqrt(float64(termMatch.Freq))
|
2017-01-25 02:43:02 +00:00
|
|
|
}
|
2019-11-27 09:23:33 +00:00
|
|
|
score := tf * termMatch.Norm * s.idf
|
2017-01-25 02:43:02 +00:00
|
|
|
|
2017-09-16 20:16:21 +00:00
|
|
|
if s.options.Explain {
|
2019-11-27 09:23:33 +00:00
|
|
|
childrenExplanations := make([]*search.Explanation, 3)
|
|
|
|
childrenExplanations[0] = &search.Explanation{
|
|
|
|
Value: tf,
|
|
|
|
Message: fmt.Sprintf("tf(termFreq(%s:%s)=%d", s.queryField, s.queryTerm, termMatch.Freq),
|
|
|
|
}
|
|
|
|
childrenExplanations[1] = &search.Explanation{
|
|
|
|
Value: termMatch.Norm,
|
|
|
|
Message: fmt.Sprintf("fieldNorm(field=%s, doc=%s)", s.queryField, termMatch.ID),
|
|
|
|
}
|
|
|
|
childrenExplanations[2] = s.idfExplanation
|
2017-01-25 02:43:02 +00:00
|
|
|
scoreExplanation = &search.Explanation{
|
|
|
|
Value: score,
|
2019-11-27 09:23:33 +00:00
|
|
|
Message: fmt.Sprintf("fieldWeight(%s:%s in %s), product of:", s.queryField, s.queryTerm, termMatch.ID),
|
|
|
|
Children: childrenExplanations,
|
2017-01-25 02:43:02 +00:00
|
|
|
}
|
|
|
|
}
|
2019-11-27 09:23:33 +00:00
|
|
|
|
|
|
|
// if the query weight isn't 1, multiply
|
|
|
|
if s.queryWeight != 1.0 {
|
|
|
|
score = score * s.queryWeight
|
|
|
|
if s.options.Explain {
|
|
|
|
childExplanations := make([]*search.Explanation, 2)
|
|
|
|
childExplanations[0] = s.queryWeightExplanation
|
|
|
|
childExplanations[1] = scoreExplanation
|
|
|
|
scoreExplanation = &search.Explanation{
|
|
|
|
Value: score,
|
|
|
|
Message: fmt.Sprintf("weight(%s:%s^%f in %s), product of:", s.queryField, s.queryTerm, s.queryBoost, termMatch.ID),
|
|
|
|
Children: childExplanations,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if s.includeScore {
|
|
|
|
rv.Score = score
|
|
|
|
}
|
|
|
|
|
|
|
|
if s.options.Explain {
|
|
|
|
rv.Expl = scoreExplanation
|
|
|
|
}
|
2017-01-25 02:43:02 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
rv.IndexInternalID = append(rv.IndexInternalID, termMatch.ID...)
|
|
|
|
|
2019-02-18 00:50:26 +00:00
|
|
|
if len(termMatch.Vectors) > 0 {
|
|
|
|
if cap(rv.FieldTermLocations) < len(termMatch.Vectors) {
|
|
|
|
rv.FieldTermLocations = make([]search.FieldTermLocation, 0, len(termMatch.Vectors))
|
2017-09-16 20:16:21 +00:00
|
|
|
}
|
2017-01-25 02:43:02 +00:00
|
|
|
|
|
|
|
for _, v := range termMatch.Vectors {
|
2019-02-18 00:50:26 +00:00
|
|
|
var ap search.ArrayPositions
|
2017-01-25 02:43:02 +00:00
|
|
|
if len(v.ArrayPositions) > 0 {
|
2019-02-18 00:50:26 +00:00
|
|
|
n := len(rv.FieldTermLocations)
|
|
|
|
if n < cap(rv.FieldTermLocations) { // reuse ap slice if available
|
|
|
|
ap = rv.FieldTermLocations[:n+1][n].Location.ArrayPositions[:0]
|
2017-01-25 02:43:02 +00:00
|
|
|
}
|
2019-02-18 00:50:26 +00:00
|
|
|
ap = append(ap, v.ArrayPositions...)
|
2017-01-25 02:43:02 +00:00
|
|
|
}
|
2019-02-18 00:50:26 +00:00
|
|
|
rv.FieldTermLocations =
|
|
|
|
append(rv.FieldTermLocations, search.FieldTermLocation{
|
|
|
|
Field: v.Field,
|
|
|
|
Term: s.queryTerm,
|
|
|
|
Location: search.Location{
|
|
|
|
Pos: v.Pos,
|
|
|
|
Start: v.Start,
|
|
|
|
End: v.End,
|
|
|
|
ArrayPositions: ap,
|
|
|
|
},
|
|
|
|
})
|
2017-01-25 02:43:02 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return rv
|
|
|
|
}
|