diff --git a/README.md b/README.md index 22d476d..7e0eea7 100644 --- a/README.md +++ b/README.md @@ -76,7 +76,7 @@ func main() { ). Size(20). Run( - es, + es, es.Search.WithContext(context.TODO()), es.Search.WithIndex("test"), ) @@ -124,6 +124,7 @@ The following queries are currently supported: | `"match_phrase_prefix"` | `MatchPhrasePrefix()` | | `"match_all"` | `MatchAll()` | | `"match_none"` | `MatchNone()` | +| `"multi_match"` | `MultiMatch()` | | `"exists"` | `Exists()` | | `"fuzzy"` | `Fuzzy()` | | `"ids"` | `IDs()` | @@ -158,6 +159,23 @@ The following aggregations are currently supported: | `"top_hits"` | `TopHits()` | | `"terms"` | `TermsAgg()` | +### Supported Top Level Options + +The following top level options are currently supported: + +| ElasticSearch DSL | `esquery.Search` Function | +| ------------------------|--------------------------------------- | +| `"highlight"` | `Highlight()` | +| `"explain"` | `Explain()` | +| `"from"` | `From()` | +| `"postFilter"` | `PostFilter()` | +| `"query"` | `Query()` | +| `"aggs"` | `Aggs()` | +| `"size"` | `Size()` | +| `"sort"` | `Sort()` | +| `"source"` | `SourceIncludes(), SourceExcludes()` | +| `"timeout"` | `Timeout()` | + #### Custom Queries and Aggregations To execute an arbitrary query or aggregation (including those not yet supported by the library), use the `CustomQuery()` or `CustomAgg()` functions, respectively. Both accept any `map[string]interface{}` value. diff --git a/aggregations_test.go b/aggregations_test.go index d113f2d..39eeaa3 100644 --- a/aggregations_test.go +++ b/aggregations_test.go @@ -58,5 +58,39 @@ func TestAggregations(t *testing.T) { }, }, }, + { + "a complex, multi-aggregation, nested", + Aggregate( + NestedAgg("categories","categories"). 
+ Aggs(TermsAgg("type","outdoors")), + FilterAgg("filtered", + Term("type", "t-shirt")), + ), + map[string]interface{}{ + "aggs": map[string]interface{}{ + "categories": map[string]interface{}{ + "nested": map[string]interface{}{ + "path": "categories", + }, + "aggs": map[string]interface{} { + "type": map[string]interface{} { + "terms": map[string]interface{} { + "field": "outdoors", + }, + }, + }, + }, + "filtered": map[string]interface{}{ + "filter": map[string]interface{}{ + "term": map[string]interface{}{ + "type": map[string]interface{} { + "value": "t-shirt", + }, + }, + }, + }, + }, + }, + }, }) } diff --git a/aggs_filter.go b/aggs_filter.go new file mode 100644 index 0000000..66770e2 --- /dev/null +++ b/aggs_filter.go @@ -0,0 +1,49 @@ +package esquery + +type FilterAggregation struct { + name string + filter Mappable + aggs []Aggregation +} + +// Filter creates a new aggregation of type "filter". The method name includes +// the "Agg" suffix to prevent conflict with the "filter" query. +func FilterAgg(name string, filter Mappable) *FilterAggregation { + return &FilterAggregation{ + name: name, + filter: filter, + } +} + +// Name returns the name of the aggregation. +func (agg *FilterAggregation) Name() string { + return agg.name +} + +// Filter sets the filter items +func (agg *FilterAggregation) Filter(filter Mappable) *FilterAggregation { + agg.filter = filter + return agg +} + +// Aggs sets sub-aggregations for the aggregation. 
+func (agg *FilterAggregation) Aggs(aggs ...Aggregation) *FilterAggregation { + agg.aggs = aggs + return agg +} + +func (agg *FilterAggregation) Map() map[string]interface{} { + outerMap := map[string]interface{}{ + "filter": agg.filter.Map(), + } + + if len(agg.aggs) > 0 { + subAggs := make(map[string]map[string]interface{}) + for _, sub := range agg.aggs { + subAggs[sub.Name()] = sub.Map() + } + outerMap["aggs"] = subAggs + } + + return outerMap +} diff --git a/aggs_filter_test.go b/aggs_filter_test.go new file mode 100644 index 0000000..3c702a2 --- /dev/null +++ b/aggs_filter_test.go @@ -0,0 +1,42 @@ +package esquery + +import "testing" + +func TestFilterAggs(t *testing.T) { + runMapTests(t, []mapTest{ + { + "filter agg: simple", + FilterAgg("filtered", Term("type", "t-shirt")), + map[string]interface{}{ + "filter": map[string]interface{}{ + "term": map[string]interface{}{ + "type": map[string]interface{}{ + "value": "t-shirt", + }, + }, + }, + }, + }, + { + "filter agg: with aggs", + FilterAgg("filtered", Term("type", "t-shirt")). + Aggs(Avg("avg_price", "price")), + map[string]interface{}{ + "filter": map[string]interface{}{ + "term": map[string]interface{}{ + "type": map[string]interface{}{ + "value": "t-shirt", + }, + }, + }, + "aggs": map[string]interface{}{ + "avg_price": map[string]interface{}{ + "avg": map[string]interface{}{ + "field": "price", + }, + }, + }, + }, + }, + }) +} diff --git a/aggs_nested.go b/aggs_nested.go new file mode 100644 index 0000000..9b262a9 --- /dev/null +++ b/aggs_nested.go @@ -0,0 +1,53 @@ +package esquery + +type NestedAggregation struct { + name string + path string + aggs []Aggregation +} + +// NestedAgg creates a new aggregation of type "nested". The method name includes +// the "Agg" suffix to prevent conflict with the "nested" query. +func NestedAgg(name string, path string) *NestedAggregation { + return &NestedAggregation{ + name: name, + path: path, + } +} + +// Name returns the name of the aggregation. 
+func (agg *NestedAggregation) Name() string { + return agg.name +} + +// NumberOfFragments sets the aggregations path +func (agg *NestedAggregation) Path(p string) *NestedAggregation { + agg.path = p + return agg +} + +// Aggs sets sub-aggregations for the aggregation. +func (agg *NestedAggregation) Aggs(aggs ...Aggregation) *NestedAggregation { + agg.aggs = aggs + return agg +} + +func (agg *NestedAggregation) Map() map[string]interface{} { + innerMap := map[string]interface{}{ + "path": agg.path, + } + + outerMap := map[string]interface{}{ + "nested": innerMap, + } + + if len(agg.aggs) > 0 { + subAggs := make(map[string]map[string]interface{}) + for _, sub := range agg.aggs { + subAggs[sub.Name()] = sub.Map() + } + outerMap["aggs"] = subAggs + } + + return outerMap +} diff --git a/aggs_nested_test.go b/aggs_nested_test.go new file mode 100644 index 0000000..87c014b --- /dev/null +++ b/aggs_nested_test.go @@ -0,0 +1,34 @@ +package esquery + +import "testing" + +func TestNestedAggs(t *testing.T) { + runMapTests(t, []mapTest{ + { + "nested agg: simple", + NestedAgg("simple", "categories"), + map[string]interface{}{ + "nested": map[string]interface{}{ + "path": "categories", + }, + }, + }, + { + "nested agg: with aggs", + NestedAgg("more_nested", "authors"). + Aggs(TermsAgg("authors", "name")), + map[string]interface{}{ + "nested": map[string]interface{}{ + "path": "authors", + }, + "aggs": map[string]interface{}{ + "authors": map[string]interface{}{ + "terms": map[string]interface{}{ + "field": "name", + }, + }, + }, + }, + }, + }) +} diff --git a/highlight.go b/highlight.go new file mode 100644 index 0000000..855f114 --- /dev/null +++ b/highlight.go @@ -0,0 +1,341 @@ +package esquery + +import ( + "github.com/fatih/structs" +) + +// Map returns a map representation of the highlight; implementing the +// Mappable interface. 
+func (q *QueryHighlight) Map() map[string]interface{} { + results := structs.Map(q.params) + if q.highlightQuery != nil { + results["query"] = q.highlightQuery.Map() + } + if q.fields != nil && len(q.fields) > 0 { + fields := make(map[string]interface{}) + for k, v := range q.fields { + fields[k] = v.Map() + } + results["fields"] = fields + } + return results +} + +type QueryHighlight struct { + highlightQuery Mappable `structs:"highlight_query,omitempty"` + fields map[string]*QueryHighlight `structs:"fields"` + params highlighParams +} + +type highlighParams struct { + PreTags []string `structs:"pre_tags,omitempty"` + PostTags []string `structs:"post_tags,omitempty"` + + FragmentSize uint16 `structs:"fragment_size,omitempty"` + NumberOfFragments uint16 `structs:"number_of_fragments,omitempty"` + Type HighlightType `structs:"type,string,omitempty"` + BoundaryChars string `structs:"boundary_chars,omitempty"` + BoundaryMaxScan uint16 `structs:"boundary_max_scan,omitempty"` + BoundaryScanner HighlightBoundaryScanner `structs:"boundary_scanner,string,omitempty"` + BoundaryScannerLocale string `structs:"boundary_scanner_locale,omitempty"` + Encoder HighlightEncoder `structs:"encoder,string,omitempty"` + ForceSource *bool `structs:"force_source,omitempty"` + Fragmenter HighlightFragmenter `structs:"fragmenter,string,omitempty"` + FragmentOffset uint16 `structs:"fragment_offset,omitempty"` + MatchedFields []string `structs:"matched_fields,omitempty"` + NoMatchSize uint16 `structs:"no_match_size,omitempty"` + Order HighlightOrder `structs:"order,string,omitempty"` + PhraseLimit uint16 `structs:"phrase_limit,omitempty"` + RequireFieldMatch *bool `structs:"require_field_match,omitempty"` + TagsSchema HighlightTagsSchema `structs:"tags_schema,string,omitempty"` +} + +// Highlight creates a new "query" of type "highlight" +func Highlight() *QueryHighlight { + return newHighlight() +} + +func newHighlight() *QueryHighlight { + return &QueryHighlight{ + fields: 
make(map[string]*QueryHighlight), + params: highlighParams{}, + } +} + +// PreTags sets the highlight query's pre_tags ignore unmapped field +func (q *QueryHighlight) PreTags(s ...string) *QueryHighlight { + q.params.PreTags = append(q.params.PreTags, s...) + return q +} + +// PostTags sets the highlight query's post_tags ignore unmapped field +func (q *QueryHighlight) PostTags(s ...string) *QueryHighlight { + q.params.PostTags = append(q.params.PostTags, s...) + return q +} + +// Field sets an entry the highlight query's fields +func (q *QueryHighlight) Field(name string, h ...*QueryHighlight) *QueryHighlight { + var fld *QueryHighlight + if len(h) > 0 { + fld = h[len(h)-1] + } else { + fld = &QueryHighlight{} + } + q.fields[name] = fld + return q +} + +// Fields sets all entries for the highlight query's fields +func (q *QueryHighlight) Fields(h map[string]*QueryHighlight) *QueryHighlight { + q.fields = h + return q +} + +// FragmentSize sets the highlight query's fragment_size ignore unmapped field +func (q *QueryHighlight) FragmentSize(i uint16) *QueryHighlight { + q.params.FragmentSize = i + return q +} + +// NumberOfFragments sets the highlight query's number_of_fragments ignore unmapped field +func (q *QueryHighlight) NumberOfFragments(i uint16) *QueryHighlight { + q.params.NumberOfFragments = i + return q +} + +// Type sets the highlight query's type ignore unmapped field +func (q *QueryHighlight) Type(t HighlightType) *QueryHighlight { + q.params.Type = t + return q +} + +// BoundaryChars sets the highlight query's boundary_chars ignore unmapped field +func (q *QueryHighlight) BoundaryChars(s string) *QueryHighlight { + q.params.BoundaryChars = s + return q +} + +// BoundaryMaxScan sets the highlight query's boundary_max_scan ignore unmapped field +func (q *QueryHighlight) BoundaryMaxScan(i uint16) *QueryHighlight { + q.params.BoundaryMaxScan = i + return q +} + +// BoundaryScanner sets the highlight query's boundary_scanner ignore unmapped field +func (q 
*QueryHighlight) BoundaryScanner(t HighlightBoundaryScanner) *QueryHighlight { + q.params.BoundaryScanner = t + return q +} + +// BoundaryScannerLocale sets the highlight query's boundary_scanner_locale ignore unmapped field +func (q *QueryHighlight) BoundaryScannerLocale(l string) *QueryHighlight { + q.params.BoundaryScannerLocale = l + return q +} + +// Encoder sets the highlight query's encoder ignore unmapped field +func (q *QueryHighlight) Encoder(e HighlightEncoder) *QueryHighlight { + q.params.Encoder = e + return q +} + +// ForceSource sets the highlight query's force_source ignore unmapped field +func (q *QueryHighlight) ForceSource(b bool) *QueryHighlight { + q.params.ForceSource = &b + return q +} + +// Fragmenter sets the highlight query's fragmenter ignore unmapped field +func (q *QueryHighlight) Fragmenter(f HighlightFragmenter) *QueryHighlight { + q.params.Fragmenter = f + return q +} + +// FragmentOffset sets the highlight query's fragment_offset ignore unmapped field +func (q *QueryHighlight) FragmentOffset(i uint16) *QueryHighlight { + q.params.FragmentOffset = i + return q +} + +// HighlightQuery sets the highlight query's highlight_query ignore unmapped field +func (q *QueryHighlight) HighlightQuery(b Mappable) *QueryHighlight { + q.highlightQuery = b + return q +} + +// MatchedFields sets the highlight query's matched_fields ignore unmapped field +func (q *QueryHighlight) MatchedFields(s ...string) *QueryHighlight { + q.params.MatchedFields = append(q.params.MatchedFields, s...) 
+ return q +} + +// NoMatchSize sets the highlight query's no_match_size ignore unmapped field +func (q *QueryHighlight) NoMatchSize(i uint16) *QueryHighlight { + q.params.NoMatchSize = i + return q +} + +// Order sets the nested highlight's score order unmapped field +func (q *QueryHighlight) Order(o HighlightOrder) *QueryHighlight { + q.params.Order = o + return q +} + +// PhraseLimit sets the highlight query's phrase_limit ignore unmapped field +func (q *QueryHighlight) PhraseLimit(i uint16) *QueryHighlight { + q.params.PhraseLimit = i + return q +} + +// RequireFieldMatch sets the highlight query's require_field_match ignore unmapped field +func (q *QueryHighlight) RequireFieldMatch(b bool) *QueryHighlight { + q.params.RequireFieldMatch = &b + return q +} + +// TagsSchema sets the highlight query's tags_schema ignore unmapped field +func (q *QueryHighlight) TagsSchema(s HighlightTagsSchema) *QueryHighlight { + q.params.TagsSchema = s + return q +} + +type HighlightType uint8 + +const ( + // HighlighterUnified is the "unified" value + HighlighterUnified HighlightType = iota + + // HighlighterPlain is the "plain" value + HighlighterPlain + + // HighlighterFvh is the "fvh" value + HighlighterFvh +) + +// String returns a string representation of the type parameter, as +// known to ElasticSearch. +func (a HighlightType) String() string { + switch a { + case HighlighterUnified: + return "unified" + case HighlighterPlain: + return "plain" + case HighlighterFvh: + return "fvh" + } + return "" +} + +type HighlightBoundaryScanner uint8 + +const ( + BoundaryScannerDefault HighlightBoundaryScanner = iota + + // BoundaryScannerChars is the "chars" value + BoundaryScannerChars + + // BoundaryScannerSentence is the "sentence" value + BoundaryScannerSentence + + // BoundaryScannerWord is the "word" value + BoundaryScannerWord +) + +// String returns a string representation of the boundary_scanner parameter, as +// known to ElasticSearch. 
+func (a HighlightBoundaryScanner) String() string { + switch a { + case BoundaryScannerChars: + return "chars" + case BoundaryScannerSentence: + return "sentence" + case BoundaryScannerWord: + return "word" + } + return "" +} + +type HighlightEncoder uint8 + +const ( + // EncoderDefault is the "default" value + EncoderDefault HighlightEncoder = iota + + // EncoderHtml is the "html" value + EncoderHtml +) + +// String returns a string representation of the encoder parameter, as +// known to ElasticSearch. +func (a HighlightEncoder) String() string { + switch a { + case EncoderDefault: + return "default" + case EncoderHtml: + return "html" + } + return "" +} + +type HighlightFragmenter uint8 + +const ( + // FragmentSpan is the "span" value + FragmenterSpan HighlightFragmenter = iota + + // FragmenterSimple is the "simple" value + FragmenterSimple +) + +// String returns a string representation of the fragmenter parameter, as +// known to ElasticSearch. +func (a HighlightFragmenter) String() string { + switch a { + case FragmenterSpan: + return "span" + case FragmenterSimple: + return "simple" + } + return "" +} + +type HighlightOrder uint8 + +const ( + // OrderNone is the "none" value + OrderNone HighlightOrder = iota + + // OrderScore is the "score" value + OrderScore +) + +// String returns a string representation of the order parameter, as +// known to ElasticSearch. +func (a HighlightOrder) String() string { + switch a { + case OrderNone: + return "none" + case OrderScore: + return "score" + } + return "" +} + +type HighlightTagsSchema uint8 + +const ( + TagsSchemaDefault HighlightTagsSchema = iota + // TagsSchemaStyled is the "styled" value + TagsSchemaStyled +) + +// String returns a string representation of the tags_schema parameter, as +// known to ElasticSearch. 
+func (a HighlightTagsSchema) String() string { + switch a { + case TagsSchemaStyled: + return "styled" + } + return "" +} diff --git a/highlight_test.go b/highlight_test.go new file mode 100644 index 0000000..ac95c57 --- /dev/null +++ b/highlight_test.go @@ -0,0 +1,93 @@ +package esquery + +import ( + "testing" +) + +func TestHighlight(t *testing.T) { + runMapTests(t, []mapTest{ + { + "simple highlight", + Highlight().Field("content"), + map[string]interface{}{ + "fields": map[string]interface{}{ + "content": map[string]interface{}{}, + }, + }, + }, + { + "highlight all params", + Highlight(). + PreTags("
", "").
+ PostTags("
", "
").
+ Field("content",
+ Highlight().
+ BoundaryChars(".;,")).
+ FragmentSize(150).
+ NumberOfFragments(4).
+ Type(HighlighterPlain).
+ BoundaryChars("()[]").
+ BoundaryMaxScan(32).
+ BoundaryScanner(BoundaryScannerChars).
+ BoundaryScannerLocale("en-US").
+ Encoder(EncoderHtml).
+ ForceSource(true).
+ Fragmenter(FragmenterSimple).
+ FragmentOffset(6).
+ HighlightQuery(
+ Bool().
+ Must(
+ Match("author").
+ Query("some guy").
+ Analyzer("analyzer?").
+ Fuzziness("fuzz"))).
+ MatchedFields("title", "body").
+ NoMatchSize(64).
+ Order(OrderScore).
+ PhraseLimit(512).
+ RequireFieldMatch(false).
+ TagsSchema(TagsSchemaStyled),
+ map[string]interface{}{
+ "pre_tags": []string{"", ""},
+ "post_tags": []string{"
", "
"},
+ "fragment_size": 150,
+ "number_of_fragments": 4,
+ "type": "plain",
+ "boundary_chars": "()[]",
+ "boundary_scanner": "chars",
+ "boundary_max_scan": 32,
+ "boundary_scanner_locale": "en-US",
+ "encoder": "html",
+ "force_source": true,
+ "fragment_offset": 6,
+ "fragmenter": "simple",
+ "matched_fields": []string{"title", "body"},
+ "no_match_size": 64,
+ "order": "score",
+ "phrase_limit": 512,
+ "require_field_match": false,
+ "tags_schema": "styled",
+ "fields": map[string]interface{}{
+ "content": map[string]interface{}{
+ "boundary_chars": ".;,",
+ },
+ },
+ "query": map[string]interface{}{
+ "bool": map[string]interface{}{
+ "must": []map[string]interface{}{
+ {
+ "match": map[string]interface{}{
+ "author": map[string]interface{}{
+ "analyzer": "analyzer?",
+ "fuzziness": "fuzz",
+ "query": "some guy",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ })
+}
diff --git a/query_multi_match.go b/query_multi_match.go
new file mode 100644
index 0000000..0d5f2a2
--- /dev/null
+++ b/query_multi_match.go
@@ -0,0 +1,210 @@
+package esquery
+
+import (
+ "github.com/fatih/structs"
+)
+
+type MultiMatchQuery struct {
+ params multiMatchParams
+}
+
+// Map returns a map representation of the query; implementing the
+// Mappable interface.
+func (q *MultiMatchQuery) Map() map[string]interface{} {
+ return map[string]interface{}{
+ "multi_match": structs.Map(q.params),
+ }
+}
+
+type multiMatchParams struct {
+ Qry interface{} `structs:"query"`
+ Fields []string `structs:"fields"`
+ Type MultiMatchType `structs:"type,string,omitempty"`
+ TieBrk float32 `structs:"tie_breaker,omitempty"`
+ Boost float32 `structs:"boost,omitempty"`
+ Anl string `structs:"analyzer,omitempty"`
+ AutoGenerate *bool `structs:"auto_generate_synonyms_phrase_query,omitempty"`
+ Fuzz string `structs:"fuzziness,omitempty"`
+ MaxExp uint16 `structs:"max_expansions,omitempty"`
+ PrefLen uint16 `structs:"prefix_length,omitempty"`
+ Trans *bool `structs:"transpositions,omitempty"`
+ FuzzyRw string `structs:"fuzzy_rewrite,omitempty"`
+ Lent *bool `structs:"lenient,omitempty"`
+ Op MatchOperator `structs:"operator,string,omitempty"`
+ MinMatch string `structs:"minimum_should_match,omitempty"`
+ ZeroTerms ZeroTerms `structs:"zero_terms_query,string,omitempty"`
+ Slp uint16 `structs:"slop,omitempty"`
+}
+
+// MultiMatch creates a new query of type "multi_match"
+func MultiMatch(simpleQuery ...interface{}) *MultiMatchQuery {
+ return newMultiMatch(simpleQuery...)
+}
+
+func newMultiMatch(simpleQuery ...interface{}) *MultiMatchQuery {
+ var qry interface{}
+ if len(simpleQuery) > 0 {
+ qry = simpleQuery[len(simpleQuery)-1]
+ }
+
+ return &MultiMatchQuery{
+ params: multiMatchParams{
+ Qry: qry,
+ },
+ }
+}
+
+// Query sets the data to find in the query's field (it is the "query" component
+// of the query).
+func (q *MultiMatchQuery) Query(data interface{}) *MultiMatchQuery {
+ q.params.Qry = data
+ return q
+}
+
+// Analyzer sets the analyzer used to convert the text in the "query" value into
+// tokens.
+func (q *MultiMatchQuery) Analyzer(a string) *MultiMatchQuery {
+ q.params.Anl = a
+ return q
+}
+
+// Fields sets the fields used in the query
+func (q *MultiMatchQuery) Fields(a ...string) *MultiMatchQuery {
+ q.params.Fields = append(q.params.Fields, a...)
+ return q
+}
+
+// AutoGenerateSynonymsPhraseQuery sets the "auto_generate_synonyms_phrase_query"
+// boolean.
+func (q *MultiMatchQuery) AutoGenerateSynonymsPhraseQuery(b bool) *MultiMatchQuery {
+ q.params.AutoGenerate = &b
+ return q
+}
+
+// Fuzziness set the maximum edit distance allowed for matching.
+func (q *MultiMatchQuery) Fuzziness(f string) *MultiMatchQuery {
+ q.params.Fuzz = f
+ return q
+}
+
+// MaxExpansions sets the maximum number of terms to which the query will expand.
+func (q *MultiMatchQuery) MaxExpansions(e uint16) *MultiMatchQuery {
+ q.params.MaxExp = e
+ return q
+}
+
+// PrefixLength sets the number of beginning characters left unchanged for fuzzy
+// matching.
+func (q *MultiMatchQuery) PrefixLength(l uint16) *MultiMatchQuery {
+ q.params.PrefLen = l
+ return q
+}
+
+// TieBreaker sets the "tie_breaker" parameter, used to boost documents matching in multiple fields.
+func (q *MultiMatchQuery) TieBreaker(l float32) *MultiMatchQuery {
+ q.params.TieBrk = l
+ return q
+}
+
+// Boost sets the "boost" parameter, used to increase or decrease the relevance score of the query.
+func (q *MultiMatchQuery) Boost(l float32) *MultiMatchQuery {
+ q.params.Boost = l
+ return q
+}
+
+// Transpositions sets whether edits for fuzzy matching include transpositions
+// of two adjacent characters.
+func (q *MultiMatchQuery) Transpositions(b bool) *MultiMatchQuery {
+ q.params.Trans = &b
+ return q
+}
+
+// FuzzyRewrite sets the method used to rewrite the query.
+func (q *MultiMatchQuery) FuzzyRewrite(s string) *MultiMatchQuery {
+ q.params.FuzzyRw = s
+ return q
+}
+
+// Lenient sets whether format-based errors should be ignored.
+func (q *MultiMatchQuery) Lenient(b bool) *MultiMatchQuery {
+ q.params.Lent = &b
+ return q
+}
+
+// Operator sets the boolean logic used to interpret text in the query value.
+func (q *MultiMatchQuery) Operator(op MatchOperator) *MultiMatchQuery {
+ q.params.Op = op
+ return q
+}
+
+// Type sets the query type
+func (q *MultiMatchQuery) Type(t MultiMatchType) *MultiMatchQuery {
+ q.params.Type = t
+ return q
+}
+
+// MinimumShouldMatch sets the minimum number of clauses that must match for a
+// document to be returned.
+func (q *MultiMatchQuery) MinimumShouldMatch(s string) *MultiMatchQuery {
+ q.params.MinMatch = s
+ return q
+}
+
+// Slop sets the maximum number of positions allowed between matching tokens.
+func (q *MultiMatchQuery) Slop(n uint16) *MultiMatchQuery {
+ q.params.Slp = n
+ return q
+}
+
+// ZeroTermsQuery sets the "zero_terms_query" option to use. This indicates
+// whether no documents are returned if the analyzer removes all tokens, such as
+// when using a stop filter.
+func (q *MultiMatchQuery) ZeroTermsQuery(s ZeroTerms) *MultiMatchQuery {
+ q.params.ZeroTerms = s
+ return q
+}
+
+// MultiMatchType is an enumeration type representing supported values for a
+// multi match query's "type" parameter.
+type MultiMatchType uint8
+
+const (
+ // MatchTypeBestFields is the "best_fields" type
+ MatchTypeBestFields MultiMatchType = iota
+
+ // MatchTypeMostFields is the "most_fields" type
+ MatchTypeMostFields
+
+ // MatchTypeCrossFields is the "cross_fields" type
+ MatchTypeCrossFields
+
+ // MatchTypePhrase is the "phrase" type
+ MatchTypePhrase
+
+ // MatchTypePhrasePrefix is the "phrase_prefix" type
+ MatchTypePhrasePrefix
+
+ // MatchTypeBoolPrefix is the "bool_prefix" type
+ MatchTypeBoolPrefix
+)
+
+// String returns a string representation of the multi match type, as known to
+// ElasticSearch.
+func (a MultiMatchType) String() string {
+ switch a {
+ case MatchTypeBestFields:
+ return "best_fields"
+ case MatchTypeMostFields:
+ return "most_fields"
+ case MatchTypeCrossFields:
+ return "cross_fields"
+ case MatchTypePhrase:
+ return "phrase"
+ case MatchTypePhrasePrefix:
+ return "phrase_prefix"
+ case MatchTypeBoolPrefix:
+ return "bool_prefix"
+ default:
+ return ""
+ }
+}
diff --git a/query_multi_match_test.go b/query_multi_match_test.go
new file mode 100644
index 0000000..084f666
--- /dev/null
+++ b/query_multi_match_test.go
@@ -0,0 +1,62 @@
+package esquery
+
+import (
+ "testing"
+)
+
+func TestMultiMatch(t *testing.T) {
+ runMapTests(t, []mapTest{
+ {
+ "simple multi_match",
+ MultiMatch("value1", "value2").Fields("title"),
+ map[string]interface{}{
+ "multi_match": map[string]interface{}{
+ "fields": []string{"title"},
+ "query": "value2",
+ },
+ },
+ },
+ {
+ "multi_match all params",
+ MultiMatch("original").
+ Query("test").
+ Analyzer("stop").
+ Fields("title", "body").
+ AutoGenerateSynonymsPhraseQuery(true).
+ Fuzziness("AUTO").
+ MaxExpansions(16).
+ PrefixLength(12).
+ TieBreaker(0.3).
+ Boost(6.4).
+ Transpositions(true).
+ FuzzyRewrite("scoring_boolean").
+ Lenient(true).
+ Operator(OperatorAnd).
+ Type(MatchTypePhrase).
+ MinimumShouldMatch("3<90%").
+ Slop(2).
+ ZeroTermsQuery(ZeroTermsAll),
+ map[string]interface{}{
+ "multi_match": map[string]interface{}{
+ "analyzer": "stop",
+ "auto_generate_synonyms_phrase_query": true,
+ "boost": 6.4,
+ "fuzziness": "AUTO",
+ "fuzzy_rewrite": "scoring_boolean",
+ "lenient": true,
+ "max_expansions": 16,
+ "minimum_should_match": "3<90%",
+ "prefix_length": 12,
+ "transpositions": true,
+ "type": "phrase",
+ "tie_breaker": 0.3,
+ "operator": "AND",
+ "zero_terms_query": "all",
+ "slop": 2,
+ "query": "test",
+ "fields": []string{"title", "body"},
+ },
+ },
+ },
+ })
+}
diff --git a/search.go b/search.go
index 3da937e..39dabf6 100644
--- a/search.go
+++ b/search.go
@@ -15,15 +15,16 @@ import (
// Not all features of the search API are currently supported, but a request can
// currently include a query, aggregations, and more.
type SearchRequest struct {
- query Mappable
aggs []Aggregation
- postFilter Mappable
- from *uint64
- size *uint64
explain *bool
- timeout *time.Duration
- source Source
+ from *uint64
+ highlight Mappable
+ postFilter Mappable
+ query Mappable
+ size *uint64
sort Sort
+ source Source
+ timeout *time.Duration
}
// Search creates a new SearchRequest object, to be filled via method chaining.
@@ -98,6 +99,13 @@ func (req *SearchRequest) SourceExcludes(keys ...string) *SearchRequest {
return req
}
+// Highlight sets a highlight for the request.
+func (req *SearchRequest) Highlight(highlight Mappable) *SearchRequest {
+ req.highlight = highlight
+ return req
+}
+
+
// Map implements the Mappable interface. It converts the request to into a
// nested map[string]interface{}, as expected by the go-elasticsearch library.
func (req *SearchRequest) Map() map[string]interface{} {
@@ -131,6 +139,9 @@ func (req *SearchRequest) Map() map[string]interface{} {
if req.timeout != nil {
m["timeout"] = fmt.Sprintf("%.0fs", req.timeout.Seconds())
}
+ if req.highlight != nil {
+ m["highlight"] = req.highlight.Map()
+ }
source := req.source.Map()
if len(source) > 0 {