From cf3d8d582757d44b5953d1b15fc120205f1f8b80 Mon Sep 17 00:00:00 2001 From: Caleb Champlin Date: Sat, 17 Oct 2020 13:41:45 -0600 Subject: [PATCH 01/15] Add support for multi_match queries --- query_multi_match.go | 210 ++++++++++++++++++++++++++++++++++++++ query_multi_match_test.go | 62 +++++++++++ 2 files changed, 272 insertions(+) create mode 100644 query_multi_match.go create mode 100644 query_multi_match_test.go diff --git a/query_multi_match.go b/query_multi_match.go new file mode 100644 index 0000000..d1009ed --- /dev/null +++ b/query_multi_match.go @@ -0,0 +1,210 @@ +package esquery + +import ( + "github.com/fatih/structs" +) + +type MultiMatchQuery struct { + params multiMatchParams +} + +// Map returns a map representation of the query; implementing the +// Mappable interface. +func (q *MultiMatchQuery) Map() map[string]interface{} { + return map[string]interface{}{ + "multi_match": structs.Map(q.params), + } +} + +type multiMatchParams struct { + Qry interface{} `structs:"query"` + Fields []string `structs:"fields"` + Type MultiMatchType `structs:"type,string,omitempty"` + TieBrk float32 `structs:"tie_breaker,omitempty"` + Boost float32 `structs:"boost,omitempty"` + Anl string `structs:"analyzer,omitempty"` + AutoGenerate *bool `structs:"auto_generate_synonyms_phrase_query,omitempty"` + Fuzz string `structs:"fuzziness,omitempty"` + MaxExp uint16 `structs:"max_expansions,omitempty"` + PrefLen uint16 `structs:"prefix_length,omitempty"` + Trans *bool `structs:"transpositions,omitempty"` + FuzzyRw string `structs:"fuzzy_rewrite,omitempty"` + Lent *bool `structs:"lenient,omitempty"` + Op MatchOperator `structs:"operator,string,omitempty"` + MinMatch string `structs:"minimum_should_match,omitempty"` + ZeroTerms ZeroTerms `structs:"zero_terms_query,string,omitempty"` + Slp uint16 `structs:"slop,omitempty"` +} + +// MultiMatch creates a new query of type "multi_match" +func MultiMatch(simpleQuery ...interface{}) *MultiMatchQuery { + return newMultiMatch(simpleQuery...) +} + +func newMultiMatch(simpleQuery ...interface{}) *MultiMatchQuery { + var qry interface{} + if len(simpleQuery) > 0 { + qry = simpleQuery[len(simpleQuery)-1] + } + + return &MultiMatchQuery{ + params: multiMatchParams{ + Qry: qry, + }, + } +} + +// Query sets the data to find in the query's field (it is the "query" component +// of the query). +func (q *MultiMatchQuery) Query(data interface{}) *MultiMatchQuery { + q.params.Qry = data + return q +} + +// Analyzer sets the analyzer used to convert the text in the "query" value into +// tokens. +func (q *MultiMatchQuery) Analyzer(a string) *MultiMatchQuery { + q.params.Anl = a + return q +} + +// Fields sets the fields used in the query +func (q *MultiMatchQuery) Fields(a ...string) *MultiMatchQuery { + q.params.Fields = append(q.params.Fields, a...) + return q +} + +// AutoGenerateSynonymsPhraseQuery sets the "auto_generate_synonyms_phrase_query" +// boolean. +func (q *MultiMatchQuery) AutoGenerateSynonymsPhraseQuery(b bool) *MultiMatchQuery { + q.params.AutoGenerate = &b + return q +} + +// Fuzziness set the maximum edit distance allowed for matching. +func (q *MultiMatchQuery) Fuzziness(f string) *MultiMatchQuery { + q.params.Fuzz = f + return q +} + +// MaxExpansions sets the maximum number of terms to which the query will expand. +func (q *MultiMatchQuery) MaxExpansions(e uint16) *MultiMatchQuery { + q.params.MaxExp = e + return q +} + +// PrefixLength sets the number of beginning characters left unchanged for fuzzy +// matching. 
+func (q *MultiMatchQuery) PrefixLength(l uint16) *MultiMatchQuery { + q.params.PrefLen = l + return q +} + +// TieBreaker +func (q *MultiMatchQuery) TieBreaker(l float32) *MultiMatchQuery { + q.params.TieBrk = l + return q +} + +// Boost +func (q *MultiMatchQuery) Boost(l float32) *MultiMatchQuery { + q.params.Boost = l + return q +} + +// Transpositions sets whether edits for fuzzy matching include transpositions +// of two adjacent characters. +func (q *MultiMatchQuery) Transpositions(b bool) *MultiMatchQuery { + q.params.Trans = &b + return q +} + +// FuzzyRewrite sets the method used to rewrite the query. +func (q *MultiMatchQuery) FuzzyRewrite(s string) *MultiMatchQuery { + q.params.FuzzyRw = s + return q +} + +// Lenient sets whether format-based errors should be ignored. +func (q *MultiMatchQuery) Lenient(b bool) *MultiMatchQuery { + q.params.Lent = &b + return q +} + +// Operator sets the boolean logic used to interpret text in the query value. +func (q *MultiMatchQuery) Operator(op MatchOperator) *MultiMatchQuery { + q.params.Op = op + return q +} + +// Type sets the query type +func (q *MultiMatchQuery) Type(t MultiMatchType) *MultiMatchQuery { + q.params.Type = t + return q +} + +// MinimumShouldMatch sets the minimum number of clauses that must match for a +// document to be returned. +func (q *MultiMatchQuery) MinimumShouldMatch(s string) *MultiMatchQuery { + q.params.MinMatch = s + return q +} + +// Slop sets the maximum number of positions allowed between matching tokens. +func (q *MultiMatchQuery) Slop(n uint16) *MultiMatchQuery { + q.params.Slp = n + return q +} + +// ZeroTermsQuery sets the "zero_terms_query" option to use. This indicates +// whether no documents are returned if the analyzer removes all tokens, such as +// when using a stop filter. +func (q *MultiMatchQuery) ZeroTermsQuery(s ZeroTerms) *MultiMatchQuery { + q.params.ZeroTerms = s + return q +} + +// MatchType is an enumeration type representing supported values for a +// multi match query's "type" parameter. +type MultiMatchType uint8 + +const ( + // TypeBestFields is the "best_fields" type + MatchTypeBestFields MultiMatchType = iota + + // TypeMostFields is the "most_fields" type + MatchTypeMostFields + + // TypeMostFields is the "cross_fields" type + MatchTypeCrossFields + + // TypeMostFields is the "phrase" type + MatchTypePhrase + + // TypeMostFields is the "phrase_prefix" type + MatchTypePhrasePrefix + + // TypeMostFields is the "bool_prefix" type + MatchTypeBoolPrefix +) + +// String returns a string representation of the match operator, as known to +// ElasticSearch. +func (a MultiMatchType) String() string { + switch a { + case MatchTypeBestFields: + return "best_fields" + case MatchTypeMostFields: + return "most_fields" + case MatchTypeCrossFields: + return "cross_fields" + case MatchTypePhrase: + return "phrase" + case MatchTypePhrasePrefix: + return "phrase_prefix" + case MatchTypeBoolPrefix: + return "bool_prefix" + default: + return "" + } +} diff --git a/query_multi_match_test.go b/query_multi_match_test.go new file mode 100644 index 0000000..1f61caa --- /dev/null +++ b/query_multi_match_test.go @@ -0,0 +1,62 @@ +package esquery + +import ( + "testing" +) + +func TestMultiMatch(t *testing.T) { + runMapTests(t, []mapTest{ + { + "simple multi_match", + MultiMatch("value1","value2").Fields("title"), + map[string]interface{}{ + "multi_match": map[string]interface{}{ + "fields": []string{"title"}, + "query": "value2", + }, + }, + }, + { + "multi_match all params", + MultiMatch("original"). 
+ Query("test"). + Analyzer("stop"). + Fields("title","body"). + AutoGenerateSynonymsPhraseQuery(true). + Fuzziness("AUTO"). + MaxExpansions(16). + PrefixLength(12). + TieBreaker(0.3). + Boost(6.4). + Transpositions(true). + FuzzyRewrite("scoring_boolean"). + Lenient(true). + Operator(OperatorAnd). + Type(MatchTypePhrase). + MinimumShouldMatch("3<90%"). + Slop(2). + ZeroTermsQuery(ZeroTermsAll), + map[string]interface{}{ + "multi_match": map[string]interface{}{ + "analyzer": "stop", + "auto_generate_synonyms_phrase_query": true, + "boost": 6.4, + "fuzziness": "AUTO", + "fuzzy_rewrite": "scoring_boolean", + "lenient": true, + "max_expansions": 16, + "minimum_should_match": "3<90%", + "prefix_length": 12, + "transpositions": true, + "type": "phrase", + "tie_breaker": 0.3, + "operator": "AND", + "zero_terms_query": "all", + "slop": 2, + "query": "test", + "fields": []string{"title","body"}, + }, + }, + }, + }) +} From f7d496389e40b87c97f76785354c504fb63f5f5c Mon Sep 17 00:00:00 2001 From: Caleb Champlin Date: Sat, 17 Oct 2020 13:42:23 -0600 Subject: [PATCH 02/15] Add support for highlights --- highlight.go | 341 ++++++++++++++++++++++++++++++++++++++++++++++ highlight_test.go | 93 +++++++++++++ search.go | 23 +++- 3 files changed, 451 insertions(+), 6 deletions(-) create mode 100644 highlight.go create mode 100644 highlight_test.go diff --git a/highlight.go b/highlight.go new file mode 100644 index 0000000..9c076bc --- /dev/null +++ b/highlight.go @@ -0,0 +1,341 @@ +package esquery + +import ( + "github.com/fatih/structs" +) + +// Map returns a map representation of the highlight; implementing the +// Mappable interface. +func (q *QueryHighlight) Map() map[string]interface{} { + results := structs.Map(q.params) + if q.highlightQuery != nil { + results["query"] = q.highlightQuery.Map() + } + if q.fields != nil && len(q.fields) > 0 { + fields := make(map[string]interface{}) + for k, v := range q.fields { + fields[k] = v.Map() + } + results["fields"] = fields + } + return results +} + +type QueryHighlight struct { + highlightQuery Mappable `structs:"highlight_query,omitempty"` + fields map[string]*QueryHighlight `structs:"fields"` + params highlighParams +} + +type highlighParams struct { + PreTags []string `structs:"pre_tags,omitempty"` + PostTags []string `structs:"post_tags,omitempty"` + + FragmentSize uint16 `structs:"fragment_size,omitempty"` + NumberOfFragments uint16 `structs:"number_of_fragments,omitempty"` + Type HighlightType `structs:"type,string,omitempty"` + BoundaryChars string `structs:"boundary_chars,omitempty"` + BoundaryMaxScan uint16 `structs:"boundary_max_scan,omitempty"` + BoundaryScanner HighlightBoundaryScanner `structs:"boundary_scanner,string,omitempty"` + BoundaryScannerLocale string `structs:"boundary_scanner_locale,omitempty"` + Encoder HighlightEncoder `structs:"encoder,string,omitempty"` + ForceSource *bool `structs:"force_source,omitempty"` + Fragmenter HighlightFragmenter `structs:"fragmenter,string,omitempty"` + FragmentOffset uint16 `structs:"fragment_offset,omitempty"` + MatchedFields []string `structs:"matched_fields,omitempty"` + NoMatchSize uint16 `structs:"no_match_size,omitempty"` + Order HighlightOrder `structs:"order,string,omitempty"` + PhraseLimit uint16 `structs:"phrase_limit,omitempty"` + RequireFieldMatch *bool `structs:"require_field_match,omitempty"` + TagsSchema HighlightTagsSchema `structs:"tags_schema,string,omitempty"` +} + +// Highlight creates a new "query" of type "highlight" +func Highlight() *QueryHighlight { + return newHighlight() +} + 
+func newHighlight() *QueryHighlight { + return &QueryHighlight{ + fields: make(map[string]*QueryHighlight), + params: highlighParams{}, + } +} + +// PreTags sets the highlight query's pre_tags ignore unmapped field +func (q *QueryHighlight) PreTags(s ...string) *QueryHighlight { + q.params.PreTags = append(q.params.PreTags,s...) + return q +} + +// PostTags sets the highlight query's post_tags ignore unmapped field +func (q *QueryHighlight) PostTags(s ...string) *QueryHighlight { + q.params.PostTags = append(q.params.PostTags,s...) + return q +} + +// Field sets an entry the highlight query's fields +func (q *QueryHighlight) Field(name string, h ...*QueryHighlight) *QueryHighlight { + var fld *QueryHighlight + if len(h) > 0 { + fld = h[len(h)-1] + } else { + fld = &QueryHighlight{} + } + q.fields[name] = fld + return q +} + +// Fields sets all entries for the highlight query's fields +func (q *QueryHighlight) Fields(h map[string]*QueryHighlight) *QueryHighlight { + q.fields = h + return q +} + +// FragmentSize sets the highlight query's fragment_size ignore unmapped field +func (q *QueryHighlight) FragmentSize(i uint16) *QueryHighlight { + q.params.FragmentSize = i + return q +} + +// NumberOfFragments sets the highlight query's number_of_fragments ignore unmapped field +func (q *QueryHighlight) NumberOfFragments(i uint16) *QueryHighlight { + q.params.NumberOfFragments = i + return q +} + +// Type sets the highlight query's type ignore unmapped field +func (q *QueryHighlight) Type(t HighlightType) *QueryHighlight { + q.params.Type = t + return q +} + +// BoundaryChars sets the highlight query's boundary_chars ignore unmapped field +func (q *QueryHighlight) BoundaryChars(s string) *QueryHighlight { + q.params.BoundaryChars = s + return q +} + +// BoundaryMaxScan sets the highlight query's boundary_max_scan ignore unmapped field +func (q *QueryHighlight) BoundaryMaxScan(i uint16) *QueryHighlight { + q.params.BoundaryMaxScan = i + return q +} + +// BoundaryScanner sets the highlight query's boundary_scanner ignore unmapped field +func (q *QueryHighlight) BoundaryScanner(t HighlightBoundaryScanner) *QueryHighlight { + q.params.BoundaryScanner = t + return q +} + +// BoundaryScannerLocale sets the highlight query's boundary_scanner_locale ignore unmapped field +func (q *QueryHighlight) BoundaryScannerLocale(l string) *QueryHighlight { + q.params.BoundaryScannerLocale = l + return q +} + +// Encoder sets the highlight query's encoder ignore unmapped field +func (q *QueryHighlight) Encoder(e HighlightEncoder) *QueryHighlight { + q.params.Encoder = e + return q +} + +// ForceSource sets the highlight query's force_source ignore unmapped field +func (q *QueryHighlight) ForceSource(b bool) *QueryHighlight { + q.params.ForceSource = &b + return q +} + +// Fragmenter sets the highlight query's fragmenter ignore unmapped field +func (q *QueryHighlight) Fragmenter(f HighlightFragmenter) *QueryHighlight { + q.params.Fragmenter = f + return q +} + +// FragmentOffset sets the highlight query's fragment_offset ignore unmapped field +func (q *QueryHighlight) FragmentOffset(i uint16) *QueryHighlight { + q.params.FragmentOffset = i + return q +} + +// HighlightQuery sets the highlight query's highlight_query ignore unmapped field +func (q *QueryHighlight) HighlightQuery(b Mappable) *QueryHighlight { + q.highlightQuery = b + return q +} + +// MatchedFields sets the highlight query's matched_fields ignore unmapped field +func (q *QueryHighlight) MatchedFields(s ...string) *QueryHighlight { + 
q.params.MatchedFields = append(q.params.MatchedFields,s...) + return q +} + +// NoMatchSize sets the highlight query's no_match_size ignore unmapped field +func (q *QueryHighlight) NoMatchSize(i uint16) *QueryHighlight { + q.params.NoMatchSize = i + return q +} + +// Order sets the nested highlight's score order unmapped field +func (q *QueryHighlight) Order(o HighlightOrder) *QueryHighlight { + q.params.Order = o + return q +} + +// PhraseLimit sets the highlight query's phrase_limit ignore unmapped field +func (q *QueryHighlight) PhraseLimit(i uint16) *QueryHighlight { + q.params.PhraseLimit = i + return q +} + +// RequireFieldMatch sets the highlight query's require_field_match ignore unmapped field +func (q *QueryHighlight) RequireFieldMatch(b bool) *QueryHighlight { + q.params.RequireFieldMatch = &b + return q +} + +// TagsSchema sets the highlight query's tags_schema ignore unmapped field +func (q *QueryHighlight) TagsSchema(s HighlightTagsSchema) *QueryHighlight { + q.params.TagsSchema = s + return q +} + +type HighlightType uint8 + +const ( + // HighlighterUnified is the "unified" value + HighlighterUnified HighlightType = iota + + // HighlighterPlain is the "plain" value + HighlighterPlain + + // HighlighterFvh is the "fvh" value + HighlighterFvh +) + +// String returns a string representation of the type parameter, as +// known to ElasticSearch. +func (a HighlightType) String() string { + switch a { + case HighlighterUnified: + return "unified" + case HighlighterPlain: + return "plain" + case HighlighterFvh: + return "fvh" + } + return "" +} + +type HighlightBoundaryScanner uint8 + +const ( + BoundaryScannerDefault HighlightBoundaryScanner = iota + + // BoundaryScannerChars is the "chars" value + BoundaryScannerChars + + // BoundaryScannerSentence is the "sentence" value + BoundaryScannerSentence + + // BoundaryScannerWord is the "word" value + BoundaryScannerWord +) + +// String returns a string representation of the boundary_scanner parameter, as +// known to ElasticSearch. +func (a HighlightBoundaryScanner) String() string { + switch a { + case BoundaryScannerChars: + return "chars" + case BoundaryScannerSentence: + return "sentence" + case BoundaryScannerWord: + return "word" + } + return "" +} + +type HighlightEncoder uint8 + +const ( + // EncoderDefault is the "default" value + EncoderDefault HighlightEncoder = iota + + // EncoderHtml is the "html" value + EncoderHtml +) + +// String returns a string representation of the encoder parameter, as +// known to ElasticSearch. +func (a HighlightEncoder) String() string { + switch a { + case EncoderDefault: + return "default" + case EncoderHtml: + return "html" + } + return "" +} + +type HighlightFragmenter uint8 + +const ( + // FragmentSpan is the "span" value + FragmenterSpan HighlightFragmenter = iota + + // FragmenterSimple is the "simple" value + FragmenterSimple +) + +// String returns a string representation of the fragmenter parameter, as +// known to ElasticSearch. +func (a HighlightFragmenter) String() string { + switch a { + case FragmenterSpan: + return "span" + case FragmenterSimple: + return "simple" + } + return "" +} + +type HighlightOrder uint8 + +const ( + // OrderNone is the "none" value + OrderNone HighlightOrder = iota + + // OrderScore is the "score" value + OrderScore +) + +// String returns a string representation of the order parameter, as +// known to ElasticSearch. 
+func (a HighlightOrder) String() string { + switch a { + case OrderNone: + return "none" + case OrderScore: + return "score" + } + return "" +} + +type HighlightTagsSchema uint8 + +const ( + TagsSchemaDefault HighlightTagsSchema = iota + // TagsSchemaStyled is the "styled" value + TagsSchemaStyled +) + +// String returns a string representation of the tags_schema parameter, as +// known to ElasticSearch. +func (a HighlightTagsSchema) String() string { + switch a { + case TagsSchemaStyled: + return "styled" + } + return "" +} diff --git a/highlight_test.go b/highlight_test.go new file mode 100644 index 0000000..76456bf --- /dev/null +++ b/highlight_test.go @@ -0,0 +1,93 @@ +package esquery + +import ( + "testing" +) + +func TestHighlight(t *testing.T) { + runMapTests(t, []mapTest{ + { + "simple highlight", + Highlight().Field("content"), + map[string]interface{}{ + "fields": map[string]interface{} { + "content": map[string]interface{}{}, + }, + }, + }, + { + "highlight all params", + Highlight(). + PreTags("
","").
+			PostTags("","
"). + Field("content", + Highlight(). + BoundaryChars(".;,")). + FragmentSize(150). + NumberOfFragments(4). + Type(HighlighterPlain). + BoundaryChars("()[]"). + BoundaryMaxScan(32). + BoundaryScanner(BoundaryScannerChars). + BoundaryScannerLocale("en-US"). + Encoder(EncoderHtml). + ForceSource(true). + Fragmenter(FragmenterSimple). + FragmentOffset(6). + HighlightQuery( + Bool(). + Must( + Match("author"). + Query("some guy"). + Analyzer("analyzer?"). + Fuzziness("fuzz"))). + MatchedFields("title","body"). + NoMatchSize(64). + Order(OrderScore). + PhraseLimit(512). + RequireFieldMatch(false). + TagsSchema(TagsSchemaStyled), + map[string]interface{}{ + "pre_tags": []string{"
",""},
+				"post_tags": []string{"","
"}, + "fragment_size": 150, + "number_of_fragments": 4, + "type": "plain", + "boundary_chars": "()[]", + "boundary_scanner": "chars", + "boundary_max_scan": 32, + "boundary_scanner_locale": "en-US", + "encoder": "html", + "force_source": true, + "fragment_offset": 6, + "fragmenter": "simple", + "matched_fields": []string{"title","body"}, + "no_match_size": 64, + "order": "score", + "phrase_limit": 512, + "require_field_match": false, + "tags_schema": "styled", + "fields": map[string]interface{}{ + "content": map[string]interface{}{ + "boundary_chars": ".;,", + }, + }, + "query": map[string]interface{} { + "bool": map[string]interface{} { + "must": []map[string]interface{} { + { + "match": map[string]interface{} { + "author": map[string]interface{} { + "analyzer": "analyzer?", + "fuzziness": "fuzz", + "query": "some guy", + }, + }, + }, + }, + }, + }, + }, + }, + }) +} diff --git a/search.go b/search.go index 3da937e..39dabf6 100644 --- a/search.go +++ b/search.go @@ -15,15 +15,16 @@ import ( // Not all features of the search API are currently supported, but a request can // currently include a query, aggregations, and more. type SearchRequest struct { - query Mappable aggs []Aggregation - postFilter Mappable - from *uint64 - size *uint64 explain *bool - timeout *time.Duration - source Source + from *uint64 + highlight Mappable + postFilter Mappable + query Mappable + size *uint64 sort Sort + source Source + timeout *time.Duration } // Search creates a new SearchRequest object, to be filled via method chaining. @@ -98,6 +99,13 @@ func (req *SearchRequest) SourceExcludes(keys ...string) *SearchRequest { return req } +// Highlight sets a highlight for the request. +func (req *SearchRequest) Highlight(highlight Mappable) *SearchRequest { + req.highlight = highlight + return req +} + + // Map implements the Mappable interface. It converts the request to into a // nested map[string]interface{}, as expected by the go-elasticsearch library. func (req *SearchRequest) Map() map[string]interface{} { @@ -131,6 +139,9 @@ func (req *SearchRequest) Map() map[string]interface{} { if req.timeout != nil { m["timeout"] = fmt.Sprintf("%.0fs", req.timeout.Seconds()) } + if req.highlight != nil { + m["highlight"] = req.highlight.Map() + } source := req.source.Map() if len(source) > 0 { From 39f0dd59c18be966df81ec2604b318a5febad9ba Mon Sep 17 00:00:00 2001 From: Caleb Champlin Date: Sat, 17 Oct 2020 13:42:50 -0600 Subject: [PATCH 03/15] Add support for nested aggregations and filtered aggregations --- aggregations_test.go | 34 ++++++++++++++++++++++++++++ aggs_filter.go | 49 ++++++++++++++++++++++++++++++++++++++++ aggs_filter_test.go | 42 +++++++++++++++++++++++++++++++++++ aggs_nested.go | 53 ++++++++++++++++++++++++++++++++++++++++++++ aggs_nested_test.go | 34 ++++++++++++++++++++++++++++ 5 files changed, 212 insertions(+) create mode 100644 aggs_filter.go create mode 100644 aggs_filter_test.go create mode 100644 aggs_nested.go create mode 100644 aggs_nested_test.go diff --git a/aggregations_test.go b/aggregations_test.go index d113f2d..39eeaa3 100644 --- a/aggregations_test.go +++ b/aggregations_test.go @@ -58,5 +58,39 @@ func TestAggregations(t *testing.T) { }, }, }, + { + "a complex, multi-aggregation, nested", + Aggregate( + NestedAgg("categories","categories"). 
+ Aggs(TermsAgg("type","outdoors")), + FilterAgg("filtered", + Term("type", "t-shirt")), + ), + map[string]interface{}{ + "aggs": map[string]interface{}{ + "categories": map[string]interface{}{ + "nested": map[string]interface{}{ + "path": "categories", + }, + "aggs": map[string]interface{} { + "type": map[string]interface{} { + "terms": map[string]interface{} { + "field": "outdoors", + }, + }, + }, + }, + "filtered": map[string]interface{}{ + "filter": map[string]interface{}{ + "term": map[string]interface{}{ + "type": map[string]interface{} { + "value": "t-shirt", + }, + }, + }, + }, + }, + }, + }, }) } diff --git a/aggs_filter.go b/aggs_filter.go new file mode 100644 index 0000000..9927f98 --- /dev/null +++ b/aggs_filter.go @@ -0,0 +1,49 @@ +package esquery + +type FilterAggregation struct { + name string + filter Mappable + aggs []Aggregation +} + +// Filter creates a new aggregation of type "filter". The method name includes +// the "Agg" suffix to prevent conflict with the "filter" query. +func FilterAgg(name string, filter Mappable) *FilterAggregation { + return &FilterAggregation{ + name: name, + filter: filter, + } +} + +// Name returns the name of the aggregation. +func (agg *FilterAggregation) Name() string { + return agg.name +} + +// Filter sets the filter items +func (agg *FilterAggregation) Filter(filter Mappable) *FilterAggregation { + agg.filter = filter + return agg +} + +// Aggs sets sub-aggregations for the aggregation. +func (agg *FilterAggregation) Aggs(aggs ...Aggregation) *FilterAggregation { + agg.aggs = aggs + return agg +} + +func (agg *FilterAggregation) Map() map[string]interface{} { + outerMap := map[string]interface{}{ + "filter": agg.filter.Map(), + } + + if len(agg.aggs) > 0 { + subAggs := make(map[string]map[string]interface{}) + for _, sub := range agg.aggs { + subAggs[sub.Name()] = sub.Map() + } + outerMap["aggs"] = subAggs + } + + return outerMap +} diff --git a/aggs_filter_test.go b/aggs_filter_test.go new file mode 100644 index 0000000..1ec30aa --- /dev/null +++ b/aggs_filter_test.go @@ -0,0 +1,42 @@ +package esquery + +import "testing" + +func TestFilterAggs(t *testing.T) { + runMapTests(t, []mapTest{ + { + "filter agg: simple", + FilterAgg("filtered", Term("type","t-shirt")), + map[string]interface{}{ + "filter": map[string]interface{}{ + "term": map[string]interface{} { + "type": map[string]interface{} { + "value": "t-shirt", + }, + }, + }, + }, + }, + { + "filter agg: with aggs", + FilterAgg("filtered", Term("type","t-shirt")). + Aggs(Avg("avg_price","price")), + map[string]interface{}{ + "filter": map[string]interface{}{ + "term": map[string]interface{} { + "type": map[string]interface{} { + "value": "t-shirt", + }, + }, + }, + "aggs": map[string]interface{} { + "avg_price": map[string]interface{} { + "avg": map[string]interface{}{ + "field": "price", + }, + }, + }, + }, + }, + }) +} diff --git a/aggs_nested.go b/aggs_nested.go new file mode 100644 index 0000000..888f819 --- /dev/null +++ b/aggs_nested.go @@ -0,0 +1,53 @@ +package esquery + +type NestedAggregation struct { + name string + path string + aggs []Aggregation +} + +// NestedAgg creates a new aggregation of type "nested". The method name includes +// the "Agg" suffix to prevent conflict with the "nested" query. +func NestedAgg(name string, path string) *NestedAggregation { + return &NestedAggregation{ + name: name, + path: path, + } +} + +// Name returns the name of the aggregation. 
+func (agg *NestedAggregation) Name() string { + return agg.name +} + +// NumberOfFragments sets the aggregations path +func (agg *NestedAggregation) Path(p string) *NestedAggregation { + agg.path = p + return agg +} + +// Aggs sets sub-aggregations for the aggregation. +func (agg *NestedAggregation) Aggs(aggs ...Aggregation) *NestedAggregation { + agg.aggs = aggs + return agg +} + +func (agg *NestedAggregation) Map() map[string]interface{} { + innerMap := map[string]interface{}{ + "path": agg.path, + } + + outerMap := map[string]interface{}{ + "nested": innerMap, + } + + if len(agg.aggs) > 0 { + subAggs := make(map[string]map[string]interface{}) + for _, sub := range agg.aggs { + subAggs[sub.Name()] = sub.Map() + } + outerMap["aggs"] = subAggs + } + + return outerMap +} diff --git a/aggs_nested_test.go b/aggs_nested_test.go new file mode 100644 index 0000000..7384e6c --- /dev/null +++ b/aggs_nested_test.go @@ -0,0 +1,34 @@ +package esquery + +import "testing" + +func TestNestedAggs(t *testing.T) { + runMapTests(t, []mapTest{ + { + "nested agg: simple", + NestedAgg("simple", "categories"), + map[string]interface{}{ + "nested": map[string]interface{}{ + "path": "categories", + }, + }, + }, + { + "nested agg: with aggs", + NestedAgg("more_nested", "authors"). + Aggs(TermsAgg("authors","name")), + map[string]interface{}{ + "nested": map[string]interface{}{ + "path": "authors", + }, + "aggs": map[string]interface{} { + "authors": map[string]interface{} { + "terms": map[string]interface{} { + "field": "name", + }, + }, + }, + }, + }, + }) +} From 52ccf20965c6473a65baa29d4e9ba3950af5c801 Mon Sep 17 00:00:00 2001 From: Caleb Champlin Date: Sat, 17 Oct 2020 13:43:02 -0600 Subject: [PATCH 04/15] Update README --- README.md | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 22d476d..7e0eea7 100644 --- a/README.md +++ b/README.md @@ -76,7 +76,7 @@ func main() { ). Size(20). Run( - es, + es, es.Search.WithContext(context.TODO()), es.Search.WithIndex("test"), ) @@ -124,6 +124,7 @@ The following queries are currently supported: | `"match_phrase_prefix"` | `MatchPhrasePrefix()` | | `"match_all"` | `MatchAll()` | | `"match_none"` | `MatchNone()` | +| `"multi_match"` | `MultiMatch()` | | `"exists"` | `Exists()` | | `"fuzzy"` | `Fuzzy()` | | `"ids"` | `IDs()` | @@ -158,6 +159,23 @@ The following aggregations are currently supported: | `"top_hits"` | `TopHits()` | | `"terms"` | `TermsAgg()` | +### Supported Top Level Options + +The following top level options are currently supported: + +| ElasticSearch DSL | `esquery.Search` Function | +| ------------------------|--------------------------------------- | +| `"highlight"` | `Highlight()` | +| `"explain"` | `Explain()` | +| `"from"` | `From()` | +| `"postFilter"` | `PostFilter()` | +| `"query"` | `Query()` | +| `"aggs"` | `Aggs()` | +| `"size"` | `Size()` | +| `"sort"` | `Sort()` | +| `"source"` | `SourceIncludes(), SourceExcludes()` | +| `"timeout"` | `Timeout()` | + #### Custom Queries and Aggregations To execute an arbitrary query or aggregation (including those not yet supported by the library), use the `CustomQuery()` or `CustomAgg()` functions, respectively. Both accept any `map[string]interface{}` value. 
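For example, the new `multi_match` query and the `highlight` option introduced in the patches above can be combined with the existing `Search` builder in the same way as the README example; the sketch below is illustrative only (the `test` index, the `title`/`body` fields and the query text are made-up placeholders):

```go
package main

import (
	"context"
	"log"

	"github.com/aquasecurity/esquery"
	"github.com/elastic/go-elasticsearch/v7"
)

func main() {
	// connect to an ElasticSearch instance
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("Failed creating client: %s", err)
	}

	// run a multi_match query against the "title" and "body" fields,
	// highlighting matching fragments from "body"
	res, err := esquery.Search().
		Query(
			esquery.MultiMatch("golang").
				Fields("title", "body").
				Type(esquery.MatchTypePhrasePrefix),
		).
		Highlight(esquery.Highlight().Field("body")).
		Size(10).
		Run(
			es,
			es.Search.WithContext(context.TODO()),
			es.Search.WithIndex("test"),
		)
	if err != nil {
		log.Fatalf("Failed searching for stuff: %s", err)
	}
	defer res.Body.Close()
}
```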
From 37dac903ccd139cb208dda45f7620e5d94ffad16 Mon Sep 17 00:00:00 2001 From: Caleb Champlin Date: Sat, 17 Oct 2020 13:46:37 -0600 Subject: [PATCH 05/15] Fix formatting --- aggs_filter.go | 8 +-- aggs_filter_test.go | 18 +++---- aggs_nested.go | 8 +-- aggs_nested_test.go | 8 +-- highlight.go | 8 +-- highlight_test.go | 108 +++++++++++++++++++------------------- query_multi_match.go | 34 ++++++------ query_multi_match_test.go | 70 ++++++++++++------------ 8 files changed, 131 insertions(+), 131 deletions(-) diff --git a/aggs_filter.go b/aggs_filter.go index 9927f98..66770e2 100644 --- a/aggs_filter.go +++ b/aggs_filter.go @@ -1,16 +1,16 @@ package esquery type FilterAggregation struct { - name string - filter Mappable - aggs []Aggregation + name string + filter Mappable + aggs []Aggregation } // Filter creates a new aggregation of type "filter". The method name includes // the "Agg" suffix to prevent conflict with the "filter" query. func FilterAgg(name string, filter Mappable) *FilterAggregation { return &FilterAggregation{ - name: name, + name: name, filter: filter, } } diff --git a/aggs_filter_test.go b/aggs_filter_test.go index 1ec30aa..3c702a2 100644 --- a/aggs_filter_test.go +++ b/aggs_filter_test.go @@ -6,11 +6,11 @@ func TestFilterAggs(t *testing.T) { runMapTests(t, []mapTest{ { "filter agg: simple", - FilterAgg("filtered", Term("type","t-shirt")), + FilterAgg("filtered", Term("type", "t-shirt")), map[string]interface{}{ "filter": map[string]interface{}{ - "term": map[string]interface{} { - "type": map[string]interface{} { + "term": map[string]interface{}{ + "type": map[string]interface{}{ "value": "t-shirt", }, }, @@ -19,18 +19,18 @@ func TestFilterAggs(t *testing.T) { }, { "filter agg: with aggs", - FilterAgg("filtered", Term("type","t-shirt")). - Aggs(Avg("avg_price","price")), + FilterAgg("filtered", Term("type", "t-shirt")). + Aggs(Avg("avg_price", "price")), map[string]interface{}{ "filter": map[string]interface{}{ - "term": map[string]interface{} { - "type": map[string]interface{} { + "term": map[string]interface{}{ + "type": map[string]interface{}{ "value": "t-shirt", }, }, }, - "aggs": map[string]interface{} { - "avg_price": map[string]interface{} { + "aggs": map[string]interface{}{ + "avg_price": map[string]interface{}{ "avg": map[string]interface{}{ "field": "price", }, diff --git a/aggs_nested.go b/aggs_nested.go index 888f819..9b262a9 100644 --- a/aggs_nested.go +++ b/aggs_nested.go @@ -1,16 +1,16 @@ package esquery type NestedAggregation struct { - name string - path string - aggs []Aggregation + name string + path string + aggs []Aggregation } // NestedAgg creates a new aggregation of type "nested". The method name includes // the "Agg" suffix to prevent conflict with the "nested" query. func NestedAgg(name string, path string) *NestedAggregation { return &NestedAggregation{ - name: name, + name: name, path: path, } } diff --git a/aggs_nested_test.go b/aggs_nested_test.go index 7384e6c..87c014b 100644 --- a/aggs_nested_test.go +++ b/aggs_nested_test.go @@ -16,14 +16,14 @@ func TestNestedAggs(t *testing.T) { { "nested agg: with aggs", NestedAgg("more_nested", "authors"). 
- Aggs(TermsAgg("authors","name")), + Aggs(TermsAgg("authors", "name")), map[string]interface{}{ "nested": map[string]interface{}{ "path": "authors", }, - "aggs": map[string]interface{} { - "authors": map[string]interface{} { - "terms": map[string]interface{} { + "aggs": map[string]interface{}{ + "authors": map[string]interface{}{ + "terms": map[string]interface{}{ "field": "name", }, }, diff --git a/highlight.go b/highlight.go index 9c076bc..855f114 100644 --- a/highlight.go +++ b/highlight.go @@ -22,7 +22,7 @@ func (q *QueryHighlight) Map() map[string]interface{} { } type QueryHighlight struct { - highlightQuery Mappable `structs:"highlight_query,omitempty"` + highlightQuery Mappable `structs:"highlight_query,omitempty"` fields map[string]*QueryHighlight `structs:"fields"` params highlighParams } @@ -64,13 +64,13 @@ func newHighlight() *QueryHighlight { // PreTags sets the highlight query's pre_tags ignore unmapped field func (q *QueryHighlight) PreTags(s ...string) *QueryHighlight { - q.params.PreTags = append(q.params.PreTags,s...) + q.params.PreTags = append(q.params.PreTags, s...) return q } // PostTags sets the highlight query's post_tags ignore unmapped field func (q *QueryHighlight) PostTags(s ...string) *QueryHighlight { - q.params.PostTags = append(q.params.PostTags,s...) + q.params.PostTags = append(q.params.PostTags, s...) return q } @@ -166,7 +166,7 @@ func (q *QueryHighlight) HighlightQuery(b Mappable) *QueryHighlight { // MatchedFields sets the highlight query's matched_fields ignore unmapped field func (q *QueryHighlight) MatchedFields(s ...string) *QueryHighlight { - q.params.MatchedFields = append(q.params.MatchedFields,s...) + q.params.MatchedFields = append(q.params.MatchedFields, s...) return q } diff --git a/highlight_test.go b/highlight_test.go index 76456bf..ac95c57 100644 --- a/highlight_test.go +++ b/highlight_test.go @@ -10,7 +10,7 @@ func TestHighlight(t *testing.T) { "simple highlight", Highlight().Field("content"), map[string]interface{}{ - "fields": map[string]interface{} { + "fields": map[string]interface{}{ "content": map[string]interface{}{}, }, }, @@ -18,69 +18,69 @@ func TestHighlight(t *testing.T) { { "highlight all params", Highlight(). - PreTags("
","").
-			PostTags("","
"). - Field("content", + PreTags("
", "").
+				PostTags("", "
"). + Field("content", Highlight(). - BoundaryChars(".;,")). - FragmentSize(150). - NumberOfFragments(4). - Type(HighlighterPlain). - BoundaryChars("()[]"). - BoundaryMaxScan(32). - BoundaryScanner(BoundaryScannerChars). - BoundaryScannerLocale("en-US"). - Encoder(EncoderHtml). - ForceSource(true). - Fragmenter(FragmenterSimple). - FragmentOffset(6). - HighlightQuery( + BoundaryChars(".;,")). + FragmentSize(150). + NumberOfFragments(4). + Type(HighlighterPlain). + BoundaryChars("()[]"). + BoundaryMaxScan(32). + BoundaryScanner(BoundaryScannerChars). + BoundaryScannerLocale("en-US"). + Encoder(EncoderHtml). + ForceSource(true). + Fragmenter(FragmenterSimple). + FragmentOffset(6). + HighlightQuery( Bool(). - Must( - Match("author"). - Query("some guy"). - Analyzer("analyzer?"). - Fuzziness("fuzz"))). - MatchedFields("title","body"). - NoMatchSize(64). - Order(OrderScore). - PhraseLimit(512). - RequireFieldMatch(false). - TagsSchema(TagsSchemaStyled), + Must( + Match("author"). + Query("some guy"). + Analyzer("analyzer?"). + Fuzziness("fuzz"))). + MatchedFields("title", "body"). + NoMatchSize(64). + Order(OrderScore). + PhraseLimit(512). + RequireFieldMatch(false). + TagsSchema(TagsSchemaStyled), map[string]interface{}{ - "pre_tags": []string{"
",""},
-				"post_tags": []string{"","
"}, - "fragment_size": 150, - "number_of_fragments": 4, - "type": "plain", - "boundary_chars": "()[]", - "boundary_scanner": "chars", - "boundary_max_scan": 32, + "pre_tags": []string{"
", ""},
+				"post_tags":               []string{"", "
"}, + "fragment_size": 150, + "number_of_fragments": 4, + "type": "plain", + "boundary_chars": "()[]", + "boundary_scanner": "chars", + "boundary_max_scan": 32, "boundary_scanner_locale": "en-US", - "encoder": "html", - "force_source": true, - "fragment_offset": 6, - "fragmenter": "simple", - "matched_fields": []string{"title","body"}, - "no_match_size": 64, - "order": "score", - "phrase_limit": 512, - "require_field_match": false, - "tags_schema": "styled", + "encoder": "html", + "force_source": true, + "fragment_offset": 6, + "fragmenter": "simple", + "matched_fields": []string{"title", "body"}, + "no_match_size": 64, + "order": "score", + "phrase_limit": 512, + "require_field_match": false, + "tags_schema": "styled", "fields": map[string]interface{}{ "content": map[string]interface{}{ - "boundary_chars": ".;,", + "boundary_chars": ".;,", }, }, - "query": map[string]interface{} { - "bool": map[string]interface{} { - "must": []map[string]interface{} { + "query": map[string]interface{}{ + "bool": map[string]interface{}{ + "must": []map[string]interface{}{ { - "match": map[string]interface{} { - "author": map[string]interface{} { - "analyzer": "analyzer?", + "match": map[string]interface{}{ + "author": map[string]interface{}{ + "analyzer": "analyzer?", "fuzziness": "fuzz", - "query": "some guy", + "query": "some guy", }, }, }, diff --git a/query_multi_match.go b/query_multi_match.go index d1009ed..0d5f2a2 100644 --- a/query_multi_match.go +++ b/query_multi_match.go @@ -17,23 +17,23 @@ func (q *MultiMatchQuery) Map() map[string]interface{} { } type multiMatchParams struct { - Qry interface{} `structs:"query"` - Fields []string `structs:"fields"` - Type MultiMatchType `structs:"type,string,omitempty"` - TieBrk float32 `structs:"tie_breaker,omitempty"` - Boost float32 `structs:"boost,omitempty"` - Anl string `structs:"analyzer,omitempty"` - AutoGenerate *bool `structs:"auto_generate_synonyms_phrase_query,omitempty"` - Fuzz string `structs:"fuzziness,omitempty"` - MaxExp uint16 `structs:"max_expansions,omitempty"` - PrefLen uint16 `structs:"prefix_length,omitempty"` - Trans *bool `structs:"transpositions,omitempty"` - FuzzyRw string `structs:"fuzzy_rewrite,omitempty"` - Lent *bool `structs:"lenient,omitempty"` - Op MatchOperator `structs:"operator,string,omitempty"` - MinMatch string `structs:"minimum_should_match,omitempty"` - ZeroTerms ZeroTerms `structs:"zero_terms_query,string,omitempty"` - Slp uint16 `structs:"slop,omitempty"` + Qry interface{} `structs:"query"` + Fields []string `structs:"fields"` + Type MultiMatchType `structs:"type,string,omitempty"` + TieBrk float32 `structs:"tie_breaker,omitempty"` + Boost float32 `structs:"boost,omitempty"` + Anl string `structs:"analyzer,omitempty"` + AutoGenerate *bool `structs:"auto_generate_synonyms_phrase_query,omitempty"` + Fuzz string `structs:"fuzziness,omitempty"` + MaxExp uint16 `structs:"max_expansions,omitempty"` + PrefLen uint16 `structs:"prefix_length,omitempty"` + Trans *bool `structs:"transpositions,omitempty"` + FuzzyRw string `structs:"fuzzy_rewrite,omitempty"` + Lent *bool `structs:"lenient,omitempty"` + Op MatchOperator `structs:"operator,string,omitempty"` + MinMatch string `structs:"minimum_should_match,omitempty"` + ZeroTerms ZeroTerms `structs:"zero_terms_query,string,omitempty"` + Slp uint16 `structs:"slop,omitempty"` } // MultiMatch creates a new query of type "multi_match" diff --git a/query_multi_match_test.go b/query_multi_match_test.go index 1f61caa..084f666 100644 --- a/query_multi_match_test.go +++ 
b/query_multi_match_test.go @@ -8,53 +8,53 @@ func TestMultiMatch(t *testing.T) { runMapTests(t, []mapTest{ { "simple multi_match", - MultiMatch("value1","value2").Fields("title"), + MultiMatch("value1", "value2").Fields("title"), map[string]interface{}{ "multi_match": map[string]interface{}{ "fields": []string{"title"}, - "query": "value2", + "query": "value2", }, }, }, { "multi_match all params", MultiMatch("original"). - Query("test"). - Analyzer("stop"). - Fields("title","body"). - AutoGenerateSynonymsPhraseQuery(true). - Fuzziness("AUTO"). - MaxExpansions(16). - PrefixLength(12). - TieBreaker(0.3). - Boost(6.4). - Transpositions(true). - FuzzyRewrite("scoring_boolean"). - Lenient(true). - Operator(OperatorAnd). - Type(MatchTypePhrase). - MinimumShouldMatch("3<90%"). - Slop(2). - ZeroTermsQuery(ZeroTermsAll), + Query("test"). + Analyzer("stop"). + Fields("title", "body"). + AutoGenerateSynonymsPhraseQuery(true). + Fuzziness("AUTO"). + MaxExpansions(16). + PrefixLength(12). + TieBreaker(0.3). + Boost(6.4). + Transpositions(true). + FuzzyRewrite("scoring_boolean"). + Lenient(true). + Operator(OperatorAnd). + Type(MatchTypePhrase). + MinimumShouldMatch("3<90%"). + Slop(2). + ZeroTermsQuery(ZeroTermsAll), map[string]interface{}{ "multi_match": map[string]interface{}{ - "analyzer": "stop", + "analyzer": "stop", "auto_generate_synonyms_phrase_query": true, - "boost": 6.4, - "fuzziness": "AUTO", - "fuzzy_rewrite": "scoring_boolean", - "lenient": true, - "max_expansions": 16, - "minimum_should_match": "3<90%", - "prefix_length": 12, - "transpositions": true, - "type": "phrase", - "tie_breaker": 0.3, - "operator": "AND", - "zero_terms_query": "all", - "slop": 2, - "query": "test", - "fields": []string{"title","body"}, + "boost": 6.4, + "fuzziness": "AUTO", + "fuzzy_rewrite": "scoring_boolean", + "lenient": true, + "max_expansions": 16, + "minimum_should_match": "3<90%", + "prefix_length": 12, + "transpositions": true, + "type": "phrase", + "tie_breaker": 0.3, + "operator": "AND", + "zero_terms_query": "all", + "slop": 2, + "query": "test", + "fields": []string{"title", "body"}, }, }, }, From 8661572bd531aa158946e2978de456d547ce162a Mon Sep 17 00:00:00 2001 From: Hardy Date: Mon, 1 Mar 2021 16:21:23 +0800 Subject: [PATCH 06/15] fix --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index d17acbf..80a4d62 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/aquasecurity/esquery +module github.com/hardy4yooz/esquery go 1.13 From 09acfd6a3dbe8afd91169334ec7da8301a0913df Mon Sep 17 00:00:00 2001 From: Hardy Date: Tue, 2 Mar 2021 11:51:41 +0800 Subject: [PATCH 07/15] feat: add support for search after --- go.sum | 4 ---- search.go | 31 ++++++++++++++++++++----------- search_test.go | 7 +++++++ 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/go.sum b/go.sum index 4108027..fae67ff 100644 --- a/go.sum +++ b/go.sum @@ -1,9 +1,5 @@ -github.com/elastic/go-elasticsearch v0.0.0 h1:Pd5fqOuBxKxv83b0+xOAJDAkziWYwFinWnBO0y+TZaA= -github.com/elastic/go-elasticsearch v0.0.0/go.mod h1:TkBSJBuTyFdBnrNqoPc54FN0vKf5c04IdM4zuStJ7xg= github.com/elastic/go-elasticsearch/v7 v7.6.0 h1:sYpGLpEFHgLUKLsZUBfuaVI9QgHjS3JdH9fX4/z8QI8= github.com/elastic/go-elasticsearch/v7 v7.6.0/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4= -github.com/elastic/go-elasticsearch/v8 v8.0.0-20200210103600-aff00e5adfde h1:Y9SZx8RQqFycLxi5W5eFmxMqnmijULVc3LMjBTtZQdM= -github.com/elastic/go-elasticsearch/v8 v8.0.0-20200210103600-aff00e5adfde/go.mod 
h1:xe9a/L2aeOgFKKgrO3ibQTnMdpAeL0GC+5/HpGScSa4= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/jgroeneveld/schema v1.0.0 h1:J0E10CrOkiSEsw6dfb1IfrDJD14pf6QLVJ3tRPl/syI= diff --git a/search.go b/search.go index 39dabf6..03e934f 100644 --- a/search.go +++ b/search.go @@ -15,16 +15,17 @@ import ( // Not all features of the search API are currently supported, but a request can // currently include a query, aggregations, and more. type SearchRequest struct { - aggs []Aggregation - explain *bool - from *uint64 - highlight Mappable - postFilter Mappable - query Mappable - size *uint64 - sort Sort - source Source - timeout *time.Duration + aggs []Aggregation + explain *bool + from *uint64 + highlight Mappable + searchAfter []string + postFilter Mappable + query Mappable + size *uint64 + sort Sort + source Source + timeout *time.Duration } // Search creates a new SearchRequest object, to be filled via method chaining. @@ -74,6 +75,12 @@ func (req *SearchRequest) Sort(name string, order Order) *SearchRequest { return req } +// SearchAfter retrieve the sorted result +func (req *SearchRequest) SearchAfter(s ...string) *SearchRequest { + req.searchAfter = append(req.searchAfter, s...) + return req +} + // Explain sets whether the ElasticSearch API should return an explanation for // how each hit's score was calculated. func (req *SearchRequest) Explain(b bool) *SearchRequest { @@ -105,7 +112,6 @@ func (req *SearchRequest) Highlight(highlight Mappable) *SearchRequest { return req } - // Map implements the Mappable interface. It converts the request to into a // nested map[string]interface{}, as expected by the go-elasticsearch library. func (req *SearchRequest) Map() map[string]interface{} { @@ -142,6 +148,9 @@ func (req *SearchRequest) Map() map[string]interface{} { if req.highlight != nil { m["highlight"] = req.highlight.Map() } + if req.searchAfter != nil { + m["search_after"] = req.searchAfter + } source := req.source.Map() if len(source) > 0 { diff --git a/search_test.go b/search_test.go index ad8a1ce..1bcf948 100644 --- a/search_test.go +++ b/search_test.go @@ -7,6 +7,13 @@ import ( func TestSearchMaps(t *testing.T) { runMapTests(t, []mapTest{ + { + "a simple query with search after", + Search().SearchAfter("_id", "name"), + map[string]interface{}{ + "search_after": []string{"_id", "name"}, + }, + }, { "a simple match_all query with a size and no aggs", Search().Query(MatchAll()).Size(20), From bdfe2df1713ac032bdcef1d712e73f1d2244ed6d Mon Sep 17 00:00:00 2001 From: Hardy Date: Tue, 2 Mar 2021 13:30:39 +0800 Subject: [PATCH 08/15] fix:set search after []string to []interface --- search.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/search.go b/search.go index 03e934f..d84bbae 100644 --- a/search.go +++ b/search.go @@ -19,7 +19,7 @@ type SearchRequest struct { explain *bool from *uint64 highlight Mappable - searchAfter []string + searchAfter []interface{} postFilter Mappable query Mappable size *uint64 @@ -76,7 +76,7 @@ func (req *SearchRequest) Sort(name string, order Order) *SearchRequest { } // SearchAfter retrieve the sorted result -func (req *SearchRequest) SearchAfter(s ...string) *SearchRequest { +func (req *SearchRequest) SearchAfter(s ...interface{}) *SearchRequest { req.searchAfter = append(req.searchAfter, s...) 
return req } From e3c77e084983488d07d9eeefedba79d644177fd9 Mon Sep 17 00:00:00 2001 From: Hardy Date: Tue, 2 Mar 2021 17:09:29 +0800 Subject: [PATCH 09/15] fix:add .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..62c8935 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +.idea/ \ No newline at end of file From cc685d325e71307c5a8135c43410ee0086359330 Mon Sep 17 00:00:00 2001 From: Hardy Date: Fri, 12 Mar 2021 19:37:19 +0800 Subject: [PATCH 10/15] feat:Support for term aggs order --- aggregations_test.go | 40 ++++++++++++++++++++++++++++++++++------ aggs_bucket.go | 10 ++++++++++ 2 files changed, 44 insertions(+), 6 deletions(-) diff --git a/aggregations_test.go b/aggregations_test.go index 39eeaa3..080ff30 100644 --- a/aggregations_test.go +++ b/aggregations_test.go @@ -61,8 +61,8 @@ func TestAggregations(t *testing.T) { { "a complex, multi-aggregation, nested", Aggregate( - NestedAgg("categories","categories"). - Aggs(TermsAgg("type","outdoors")), + NestedAgg("categories", "categories"). + Aggs(TermsAgg("type", "outdoors")), FilterAgg("filtered", Term("type", "t-shirt")), ), @@ -72,9 +72,9 @@ func TestAggregations(t *testing.T) { "nested": map[string]interface{}{ "path": "categories", }, - "aggs": map[string]interface{} { - "type": map[string]interface{} { - "terms": map[string]interface{} { + "aggs": map[string]interface{}{ + "type": map[string]interface{}{ + "terms": map[string]interface{}{ "field": "outdoors", }, }, @@ -83,7 +83,7 @@ func TestAggregations(t *testing.T) { "filtered": map[string]interface{}{ "filter": map[string]interface{}{ "term": map[string]interface{}{ - "type": map[string]interface{} { + "type": map[string]interface{}{ "value": "t-shirt", }, }, @@ -92,5 +92,33 @@ func TestAggregations(t *testing.T) { }, }, }, + { + "order for termsAggs", + //eq.Aggregate(eq.TermsAgg("a1", "FIELD1").Size(0).Aggs(eq.Sum("a2", "FIELD2.SUBFIELD"))) + Aggregate( + TermsAgg("categories", "categories"). + Order(map[string]string{"priceSum": "desc"}). + Size(5).Aggs(Sum("priceSum", "price"))), + map[string]interface{}{ + "aggs": map[string]interface{}{ + "categories": map[string]interface{}{ + "terms": map[string]interface{}{ + "field": "categories", + "order": map[string]interface{}{ + "priceSum": "desc", + }, + "size": 5, + }, + "aggs": map[string]interface{}{ + "priceSum": map[string]interface{}{ + "sum": map[string]interface{}{ + "field": "price", + }, + }, + }, + }, + }, + }, + }, }) } diff --git a/aggs_bucket.go b/aggs_bucket.go index 09b5185..c13a898 100644 --- a/aggs_bucket.go +++ b/aggs_bucket.go @@ -12,6 +12,7 @@ type TermsAggregation struct { shardSize *float64 showTermDoc *bool aggs []Aggregation + order map[string]string } // TermsAgg creates a new aggregation of type "terms". The method name includes @@ -54,6 +55,12 @@ func (agg *TermsAggregation) Aggs(aggs ...Aggregation) *TermsAggregation { return agg } +// Order sets the sort for terms agg +func (agg *TermsAggregation) Order(order map[string]string) *TermsAggregation { + agg.order = order + return agg +} + // Map returns a map representation of the aggregation, thus implementing the // Mappable interface. 
func (agg *TermsAggregation) Map() map[string]interface{} { @@ -70,6 +77,9 @@ func (agg *TermsAggregation) Map() map[string]interface{} { if agg.showTermDoc != nil { innerMap["show_term_doc_count_error"] = *agg.showTermDoc } + if agg.order != nil { + innerMap["order"] = agg.order + } outerMap := map[string]interface{}{ "terms": innerMap, From 12616dd9d3447b7f3d06799fb9ed690d1a7cc216 Mon Sep 17 00:00:00 2001 From: Hardy Date: Fri, 12 Mar 2021 20:54:48 +0800 Subject: [PATCH 11/15] Feat: Support include filter for termAggs --- aggregations_test.go | 36 ++++++++++++++++++++++++++++++++++++ aggs_bucket.go | 16 ++++++++++++++++ 2 files changed, 52 insertions(+) diff --git a/aggregations_test.go b/aggregations_test.go index 080ff30..949123d 100644 --- a/aggregations_test.go +++ b/aggregations_test.go @@ -120,5 +120,41 @@ func TestAggregations(t *testing.T) { }, }, }, + { + "Single include for termsAggs", + //eq.Aggregate(eq.TermsAgg("a1", "FIELD1").Size(0).Aggs(eq.Sum("a2", "FIELD2.SUBFIELD"))) + Aggregate( + TermsAgg("categories", "categories"). + Include("red.*|blue.*"), + ), + map[string]interface{}{ + "aggs": map[string]interface{}{ + "categories": map[string]interface{}{ + "terms": map[string]interface{}{ + "field": "categories", + "include": "red.*|blue.*", + }, + }, + }, + }, + }, + { + "Multi include for termsAggs", + //eq.Aggregate(eq.TermsAgg("a1", "FIELD1").Size(0).Aggs(eq.Sum("a2", "FIELD2.SUBFIELD"))) + Aggregate( + TermsAgg("categories", "categories"). + Include("red", "blue"), + ), + map[string]interface{}{ + "aggs": map[string]interface{}{ + "categories": map[string]interface{}{ + "terms": map[string]interface{}{ + "field": "categories", + "include": []string{"red", "blue"}, + }, + }, + }, + }, + }, }) } diff --git a/aggs_bucket.go b/aggs_bucket.go index c13a898..1a45a14 100644 --- a/aggs_bucket.go +++ b/aggs_bucket.go @@ -13,6 +13,7 @@ type TermsAggregation struct { showTermDoc *bool aggs []Aggregation order map[string]string + include []string } // TermsAgg creates a new aggregation of type "terms". The method name includes @@ -61,6 +62,12 @@ func (agg *TermsAggregation) Order(order map[string]string) *TermsAggregation { return agg } +// Include filter the values for buckets +func (agg *TermsAggregation) Include(include ...string) *TermsAggregation { + agg.include = include + return agg +} + // Map returns a map representation of the aggregation, thus implementing the // Mappable interface. func (agg *TermsAggregation) Map() map[string]interface{} { @@ -81,6 +88,15 @@ func (agg *TermsAggregation) Map() map[string]interface{} { innerMap["order"] = agg.order } + if agg.include != nil { + if len(agg.include) <= 1 { + innerMap["include"] = agg.include[0] + } else { + innerMap["include"] = agg.include + } + + } + outerMap := map[string]interface{}{ "terms": innerMap, } From ca0cfb32242363fbaaf8aef72f007fe975f83f08 Mon Sep 17 00:00:00 2001 From: Hardy Date: Mon, 15 Mar 2021 10:42:44 +0800 Subject: [PATCH 12/15] Update aggregations_test.go Fix conflict. 
--- aggregations_test.go | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/aggregations_test.go b/aggregations_test.go index 8730966..949123d 100644 --- a/aggregations_test.go +++ b/aggregations_test.go @@ -82,9 +82,8 @@ func TestAggregations(t *testing.T) { }, "filtered": map[string]interface{}{ "filter": map[string]interface{}{ - "term": map[string]interface{} - "type": map[string]interface{} { - + "term": map[string]interface{}{ + "type": map[string]interface{}{ "value": "t-shirt", }, }, @@ -93,7 +92,6 @@ func TestAggregations(t *testing.T) { }, }, }, - { "order for termsAggs", //eq.Aggregate(eq.TermsAgg("a1", "FIELD1").Size(0).Aggs(eq.Sum("a2", "FIELD2.SUBFIELD"))) From 0064054bc7b636b3f1f5207c2c918e60fbe709f0 Mon Sep 17 00:00:00 2001 From: Oran Moshai <12291998+oranmoshai@users.noreply.github.com> Date: Mon, 15 Mar 2021 09:20:44 +0200 Subject: [PATCH 13/15] Update go.mod --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 80a4d62..d17acbf 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/hardy4yooz/esquery +module github.com/aquasecurity/esquery go 1.13 From 00431fc79f07cf21e688070419b19944fae8cf6c Mon Sep 17 00:00:00 2001 From: Hardy Date: Thu, 27 May 2021 22:06:02 +0800 Subject: [PATCH 14/15] feat: Support Nested for joining queries. --- query_joining.go | 40 ++++++++++++++++++++++++++++++++++++++++ query_joining_test.go | 22 ++++++++++++++++++++++ 2 files changed, 62 insertions(+) create mode 100644 query_joining.go create mode 100644 query_joining_test.go diff --git a/query_joining.go b/query_joining.go new file mode 100644 index 0000000..6655de9 --- /dev/null +++ b/query_joining.go @@ -0,0 +1,40 @@ +package esquery + +// NestedQuery represents a query of type nested as described in: +// https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-nested-query.html +type NestedQuery struct { + path string + query Mappable + scoreMode string + ignoreUnmapped bool +} + +func Nested(path string, query Mappable) *NestedQuery { + return &NestedQuery{ + path: path, + query: query, + } +} + +func (n *NestedQuery) ScoreMode(mode string) *NestedQuery { + n.scoreMode = mode + return n +} + +func (n *NestedQuery) IgnoreUnmapped(val bool) *NestedQuery { + n.ignoreUnmapped = val + return n +} + +// Map returns a map representation of the query, thus implementing the +// Mappable interface. 
+func (n *NestedQuery) Map() map[string]interface{} { + innerMap := map[string]interface{}{"path": n.path, "query": n.query.Map()} + if n.scoreMode != "" { + innerMap["score_mode"] = n.scoreMode + } + if n.ignoreUnmapped == true { + innerMap["ignore_unmapped"] = n.ignoreUnmapped + } + return map[string]interface{}{"nested": innerMap} +} diff --git a/query_joining_test.go b/query_joining_test.go new file mode 100644 index 0000000..7b4636b --- /dev/null +++ b/query_joining_test.go @@ -0,0 +1,22 @@ +package esquery + +import ( + "testing" +) + +func TestNested(t *testing.T) { + runMapTests(t, []mapTest{ + { + "Nested Query", + Nested("dns_values", Term("dns_values.type", "A")).ScoreMode("max").IgnoreUnmapped(true), + map[string]interface{}{ + "nested": map[string]interface{}{ + "path": "dns_values", + "query": Term("dns_values.type", "A").Map(), + "score_mode": "max", + "ignore_unmapped": true, + }, + }, + }, + }) +} From 4951f7774d12d4a88dafb16d3d4862e566249149 Mon Sep 17 00:00:00 2001 From: danta Date: Fri, 11 Jun 2021 10:18:13 +0800 Subject: [PATCH 15/15] feat: add support for collapse --- go.mod | 2 +- search.go | 15 ++++++++++++--- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index d17acbf..7f6ca00 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/aquasecurity/esquery +module github.com/okdanta/esquery go 1.13 diff --git a/search.go b/search.go index 5c661a2..cb406da 100644 --- a/search.go +++ b/search.go @@ -25,8 +25,8 @@ type SearchRequest struct { size *uint64 sort Sort source Source + collapse map[string]interface{} timeout *time.Duration - } // Search creates a new SearchRequest object, to be filled via method chaining. @@ -40,6 +40,14 @@ func (req *SearchRequest) Query(q Mappable) *SearchRequest { return req } +// Collapse sets one field to collapse for the request. +func (req *SearchRequest) Collapse(field string) *SearchRequest { + req.collapse = map[string]interface{}{ + "field": field, + } + return req +} + // Aggs sets one or more aggregations for the request. func (req *SearchRequest) Aggs(aggs ...Aggregation) *SearchRequest { req.aggs = append(req.aggs, aggs...) @@ -113,8 +121,6 @@ func (req *SearchRequest) Highlight(highlight Mappable) *SearchRequest { return req } - - // Map implements the Mappable interface. It converts the request to into a // nested map[string]interface{}, as expected by the go-elasticsearch library. func (req *SearchRequest) Map() map[string]interface{} { @@ -155,6 +161,9 @@ func (req *SearchRequest) Map() map[string]interface{} { m["search_after"] = req.searchAfter } + if req.collapse != nil { + m["collapse"] = req.collapse + } source := req.source.Map() if len(source) > 0 {
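The later patches in this series extend the same builder; as a rough sketch (assuming the `esquery` import as above, with `user_id` and the search_after values being made-up placeholders), the nested query, terms-aggregation ordering/filtering, `search_after` and `collapse` additions can be combined like this:

```go
req := esquery.Search().
	Query(
		// nested query, mirroring the query_joining_test.go case above
		esquery.Nested("dns_values", esquery.Term("dns_values.type", "A")).
			ScoreMode("max").
			IgnoreUnmapped(true),
	).
	Aggs(
		// terms aggregation ordered by a sub-aggregation and filtered by a pattern
		esquery.TermsAgg("categories", "categories").
			Order(map[string]string{"priceSum": "desc"}).
			Include("red.*|blue.*").
			Aggs(esquery.Sum("priceSum", "price")),
	).
	Collapse("user_id").                  // collapse hits on a hypothetical field
	SearchAfter("2021-06-11", "some-id"). // values taken from the sort keys of a previous page
	Size(10)

_ = req.Map() // the nested map[string]interface{} passed to go-elasticsearch
```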