search-collapse #6

@@ -1,16 +1,16 @@
 package esquery

 type FilterAggregation struct {
-	name string
-	filter Mappable
-	aggs []Aggregation
+	name   string
+	filter Mappable
+	aggs   []Aggregation
 }

 // Filter creates a new aggregation of type "filter". The method name includes
 // the "Agg" suffix to prevent conflict with the "filter" query.
 func FilterAgg(name string, filter Mappable) *FilterAggregation {
 	return &FilterAggregation{
-		name: name,
+		name:   name,
 		filter: filter,
 	}
 }

@@ -6,11 +6,11 @@ func TestFilterAggs(t *testing.T) {
 	runMapTests(t, []mapTest{
 		{
 			"filter agg: simple",
-			FilterAgg("filtered", Term("type","t-shirt")),
+			FilterAgg("filtered", Term("type", "t-shirt")),
 			map[string]interface{}{
 				"filter": map[string]interface{}{
-					"term": map[string]interface{} {
-						"type": map[string]interface{} {
+					"term": map[string]interface{}{
+						"type": map[string]interface{}{
 							"value": "t-shirt",
 						},
 					},

@@ -19,18 +19,18 @@ func TestFilterAggs(t *testing.T) {
 		},
 		{
 			"filter agg: with aggs",
-			FilterAgg("filtered", Term("type","t-shirt")).
-				Aggs(Avg("avg_price","price")),
+			FilterAgg("filtered", Term("type", "t-shirt")).
+				Aggs(Avg("avg_price", "price")),
 			map[string]interface{}{
 				"filter": map[string]interface{}{
-					"term": map[string]interface{} {
-						"type": map[string]interface{} {
+					"term": map[string]interface{}{
+						"type": map[string]interface{}{
 							"value": "t-shirt",
 						},
 					},
 				},
-				"aggs": map[string]interface{} {
-					"avg_price": map[string]interface{} {
+				"aggs": map[string]interface{}{
+					"avg_price": map[string]interface{}{
 						"avg": map[string]interface{}{
 							"field": "price",
 						},
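
Note (reviewer sketch, not part of the diff): the hunks above appear to touch only formatting, so behavior should be unchanged. For context, a minimal usage sketch of the filter aggregation, placed in package esquery like the tests; it assumes FilterAggregation exposes Map() the way runMapTests exercises it, and the function name exampleFilterAgg is made up for illustration.

package esquery

import (
	"encoding/json"
	"fmt"
)

// exampleFilterAgg builds the same aggregation as the "filter agg: with aggs"
// test case above and prints its JSON form.
func exampleFilterAgg() {
	agg := FilterAgg("filtered", Term("type", "t-shirt")).
		Aggs(Avg("avg_price", "price"))

	// json.Marshal sorts map keys, so this should print roughly:
	// {"aggs":{"avg_price":{"avg":{"field":"price"}}},"filter":{"term":{"type":{"value":"t-shirt"}}}}
	b, _ := json.Marshal(agg.Map())
	fmt.Println(string(b))
}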

@@ -1,16 +1,16 @@
 package esquery

 type NestedAggregation struct {
-	name   string
-	path   string
-	aggs   []Aggregation
+	name string
+	path string
+	aggs []Aggregation
 }

 // NestedAgg creates a new aggregation of type "nested". The method name includes
 // the "Agg" suffix to prevent conflict with the "nested" query.
 func NestedAgg(name string, path string) *NestedAggregation {
 	return &NestedAggregation{
-		name:   name,
+		name: name,
 		path: path,
 	}
 }

@@ -16,14 +16,14 @@ func TestNestedAggs(t *testing.T) {
 		{
 			"nested agg: with aggs",
 			NestedAgg("more_nested", "authors").
-				Aggs(TermsAgg("authors","name")),
+				Aggs(TermsAgg("authors", "name")),
 			map[string]interface{}{
 				"nested": map[string]interface{}{
 					"path": "authors",
 				},
-				"aggs": map[string]interface{} {
-					"authors": map[string]interface{} {
-						"terms": map[string]interface{} {
+				"aggs": map[string]interface{}{
+					"authors": map[string]interface{}{
+						"terms": map[string]interface{}{
 							"field": "name",
 						},
 					},
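
The same kind of sketch applies to the nested aggregation above (again hypothetical and not part of the diff; it assumes NestedAggregation exposes Map() like the other aggregation types in these tests, and exampleNestedAgg is an invented name).

package esquery

import (
	"encoding/json"
	"fmt"
)

// exampleNestedAgg mirrors the "nested agg: with aggs" test case above.
func exampleNestedAgg() {
	agg := NestedAgg("more_nested", "authors").
		Aggs(TermsAgg("authors", "name"))

	// Expected to print roughly (keys sorted by json.Marshal):
	// {"aggs":{"authors":{"terms":{"field":"name"}}},"nested":{"path":"authors"}}
	b, _ := json.Marshal(agg.Map())
	fmt.Println(string(b))
}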

@@ -22,7 +22,7 @@ func (q *QueryHighlight) Map() map[string]interface{} {
 }

 type QueryHighlight struct {
-	highlightQuery Mappable `structs:"highlight_query,omitempty"`
+	highlightQuery Mappable                   `structs:"highlight_query,omitempty"`
 	fields         map[string]*QueryHighlight `structs:"fields"`
 	params         highlighParams
 }

@@ -64,13 +64,13 @@ func newHighlight() *QueryHighlight {

 // PreTags sets the highlight query's pre_tags ignore unmapped field
 func (q *QueryHighlight) PreTags(s ...string) *QueryHighlight {
-	q.params.PreTags = append(q.params.PreTags,s...)
+	q.params.PreTags = append(q.params.PreTags, s...)
 	return q
 }

 // PostTags sets the highlight query's post_tags ignore unmapped field
 func (q *QueryHighlight) PostTags(s ...string) *QueryHighlight {
-	q.params.PostTags = append(q.params.PostTags,s...)
+	q.params.PostTags = append(q.params.PostTags, s...)
 	return q
 }

@@ -166,7 +166,7 @@ func (q *QueryHighlight) HighlightQuery(b Mappable) *QueryHighlight {

 // MatchedFields sets the highlight query's matched_fields ignore unmapped field
 func (q *QueryHighlight) MatchedFields(s ...string) *QueryHighlight {
-	q.params.MatchedFields = append(q.params.MatchedFields,s...)
+	q.params.MatchedFields = append(q.params.MatchedFields, s...)
 	return q
 }

@@ -10,7 +10,7 @@ func TestHighlight(t *testing.T) {
 			"simple highlight",
 			Highlight().Field("content"),
 			map[string]interface{}{
-				"fields": map[string]interface{} {
+				"fields": map[string]interface{}{
 					"content": map[string]interface{}{},
 				},
 			},

@@ -18,69 +18,69 @@ func TestHighlight(t *testing.T) {
 		{
 			"highlight all params",
 			Highlight().
-				PreTags("<pre>","<code>").
-				PostTags("</code>","</pre>").
-				Field("content",
+				PreTags("<pre>", "<code>").
+				PostTags("</code>", "</pre>").
+				Field("content",
 					Highlight().
-						BoundaryChars(".;,")).
-				FragmentSize(150).
-				NumberOfFragments(4).
-				Type(HighlighterPlain).
-				BoundaryChars("()[]").
-				BoundaryMaxScan(32).
-				BoundaryScanner(BoundaryScannerChars).
-				BoundaryScannerLocale("en-US").
-				Encoder(EncoderHtml).
-				ForceSource(true).
-				Fragmenter(FragmenterSimple).
-				FragmentOffset(6).
-				HighlightQuery(
+						BoundaryChars(".;,")).
+				FragmentSize(150).
+				NumberOfFragments(4).
+				Type(HighlighterPlain).
+				BoundaryChars("()[]").
+				BoundaryMaxScan(32).
+				BoundaryScanner(BoundaryScannerChars).
+				BoundaryScannerLocale("en-US").
+				Encoder(EncoderHtml).
+				ForceSource(true).
+				Fragmenter(FragmenterSimple).
+				FragmentOffset(6).
+				HighlightQuery(
 					Bool().
-						Must(
-							Match("author").
-								Query("some guy").
-								Analyzer("analyzer?").
-								Fuzziness("fuzz"))).
-				MatchedFields("title","body").
-				NoMatchSize(64).
-				Order(OrderScore).
-				PhraseLimit(512).
-				RequireFieldMatch(false).
-				TagsSchema(TagsSchemaStyled),
+						Must(
+							Match("author").
+								Query("some guy").
+								Analyzer("analyzer?").
+								Fuzziness("fuzz"))).
+				MatchedFields("title", "body").
+				NoMatchSize(64).
+				Order(OrderScore).
+				PhraseLimit(512).
+				RequireFieldMatch(false).
+				TagsSchema(TagsSchemaStyled),
 			map[string]interface{}{
-				"pre_tags": []string{"<pre>","<code>"},
-				"post_tags": []string{"</code>","</pre>"},
-				"fragment_size": 150,
-				"number_of_fragments": 4,
-				"type": "plain",
-				"boundary_chars": "()[]",
-				"boundary_scanner": "chars",
-				"boundary_max_scan": 32,
+				"pre_tags": []string{"<pre>", "<code>"},
+				"post_tags": []string{"</code>", "</pre>"},
+				"fragment_size": 150,
+				"number_of_fragments": 4,
+				"type": "plain",
+				"boundary_chars": "()[]",
+				"boundary_scanner": "chars",
+				"boundary_max_scan": 32,
 				"boundary_scanner_locale": "en-US",
-				"encoder": "html",
-				"force_source": true,
-				"fragment_offset": 6,
-				"fragmenter": "simple",
-				"matched_fields": []string{"title","body"},
-				"no_match_size": 64,
-				"order": "score",
-				"phrase_limit": 512,
-				"require_field_match": false,
-				"tags_schema": "styled",
+				"encoder": "html",
+				"force_source": true,
+				"fragment_offset": 6,
+				"fragmenter": "simple",
+				"matched_fields": []string{"title", "body"},
+				"no_match_size": 64,
+				"order": "score",
+				"phrase_limit": 512,
+				"require_field_match": false,
+				"tags_schema": "styled",
 				"fields": map[string]interface{}{
 					"content": map[string]interface{}{
-						"boundary_chars": ".;,",
+						"boundary_chars": ".;,",
 					},
 				},
-				"query": map[string]interface{} {
-					"bool": map[string]interface{} {
-						"must": []map[string]interface{} {
+				"query": map[string]interface{}{
+					"bool": map[string]interface{}{
+						"must": []map[string]interface{}{
 							{
-								"match": map[string]interface{} {
-									"author": map[string]interface{} {
-										"analyzer": "analyzer?",
+								"match": map[string]interface{}{
+									"author": map[string]interface{}{
+										"analyzer": "analyzer?",
 										"fuzziness": "fuzz",
-										"query": "some guy",
+										"query": "some guy",
 									},
 								},
 							},
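
Note (sketch, not part of the diff): QueryHighlight.Map() is visible in the hunk header above, so a highlight can be serialized directly. The combination below is my own and its exact output is an assumption extrapolated from the expectations in this test file; exampleHighlight is an invented name.

package esquery

import (
	"encoding/json"
	"fmt"
)

// exampleHighlight builds a small highlight with custom tags and one field.
func exampleHighlight() {
	hl := Highlight().
		PreTags("<em>").
		PostTags("</em>").
		Field("content")

	// Should print roughly (keys sorted by json.Marshal):
	// {"fields":{"content":{}},"post_tags":["</em>"],"pre_tags":["<em>"]}
	b, _ := json.Marshal(hl.Map())
	fmt.Println(string(b))
}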

@@ -17,23 +17,23 @@ func (q *MultiMatchQuery) Map() map[string]interface{} {
 }

 type multiMatchParams struct {
-	Qry interface{} `structs:"query"`
-	Fields []string `structs:"fields"`
-	Type MultiMatchType `structs:"type,string,omitempty"`
-	TieBrk float32 `structs:"tie_breaker,omitempty"`
-	Boost float32 `structs:"boost,omitempty"`
-	Anl string `structs:"analyzer,omitempty"`
-	AutoGenerate *bool `structs:"auto_generate_synonyms_phrase_query,omitempty"`
-	Fuzz string `structs:"fuzziness,omitempty"`
-	MaxExp uint16 `structs:"max_expansions,omitempty"`
-	PrefLen uint16 `structs:"prefix_length,omitempty"`
-	Trans *bool `structs:"transpositions,omitempty"`
-	FuzzyRw string `structs:"fuzzy_rewrite,omitempty"`
-	Lent *bool `structs:"lenient,omitempty"`
-	Op MatchOperator `structs:"operator,string,omitempty"`
-	MinMatch string `structs:"minimum_should_match,omitempty"`
-	ZeroTerms ZeroTerms `structs:"zero_terms_query,string,omitempty"`
-	Slp uint16 `structs:"slop,omitempty"`
+	Qry          interface{}    `structs:"query"`
+	Fields       []string       `structs:"fields"`
+	Type         MultiMatchType `structs:"type,string,omitempty"`
+	TieBrk       float32        `structs:"tie_breaker,omitempty"`
+	Boost        float32        `structs:"boost,omitempty"`
+	Anl          string         `structs:"analyzer,omitempty"`
+	AutoGenerate *bool          `structs:"auto_generate_synonyms_phrase_query,omitempty"`
+	Fuzz         string         `structs:"fuzziness,omitempty"`
+	MaxExp       uint16         `structs:"max_expansions,omitempty"`
+	PrefLen      uint16         `structs:"prefix_length,omitempty"`
+	Trans        *bool          `structs:"transpositions,omitempty"`
+	FuzzyRw      string         `structs:"fuzzy_rewrite,omitempty"`
+	Lent         *bool          `structs:"lenient,omitempty"`
+	Op           MatchOperator  `structs:"operator,string,omitempty"`
+	MinMatch     string         `structs:"minimum_should_match,omitempty"`
+	ZeroTerms    ZeroTerms      `structs:"zero_terms_query,string,omitempty"`
+	Slp          uint16         `structs:"slop,omitempty"`
 }

 // MultiMatch creates a new query of type "multi_match"

@@ -8,53 +8,53 @@ func TestMultiMatch(t *testing.T) {
 	runMapTests(t, []mapTest{
 		{
 			"simple multi_match",
-			MultiMatch("value1","value2").Fields("title"),
+			MultiMatch("value1", "value2").Fields("title"),
 			map[string]interface{}{
 				"multi_match": map[string]interface{}{
 					"fields": []string{"title"},
-					"query": "value2",
+					"query": "value2",
 				},
 			},
 		},
 		{
 			"multi_match all params",
 			MultiMatch("original").
-				Query("test").
-				Analyzer("stop").
-				Fields("title","body").
-				AutoGenerateSynonymsPhraseQuery(true).
-				Fuzziness("AUTO").
-				MaxExpansions(16).
-				PrefixLength(12).
-				TieBreaker(0.3).
-				Boost(6.4).
-				Transpositions(true).
-				FuzzyRewrite("scoring_boolean").
-				Lenient(true).
-				Operator(OperatorAnd).
-				Type(MatchTypePhrase).
-				MinimumShouldMatch("3<90%").
-				Slop(2).
-				ZeroTermsQuery(ZeroTermsAll),
+				Query("test").
+				Analyzer("stop").
+				Fields("title", "body").
+				AutoGenerateSynonymsPhraseQuery(true).
+				Fuzziness("AUTO").
+				MaxExpansions(16).
+				PrefixLength(12).
+				TieBreaker(0.3).
+				Boost(6.4).
+				Transpositions(true).
+				FuzzyRewrite("scoring_boolean").
+				Lenient(true).
+				Operator(OperatorAnd).
+				Type(MatchTypePhrase).
+				MinimumShouldMatch("3<90%").
+				Slop(2).
+				ZeroTermsQuery(ZeroTermsAll),
 			map[string]interface{}{
 				"multi_match": map[string]interface{}{
-					"analyzer": "stop",
+					"analyzer": "stop",
 					"auto_generate_synonyms_phrase_query": true,
-					"boost": 6.4,
-					"fuzziness": "AUTO",
-					"fuzzy_rewrite": "scoring_boolean",
-					"lenient": true,
-					"max_expansions": 16,
-					"minimum_should_match": "3<90%",
-					"prefix_length": 12,
-					"transpositions": true,
-					"type": "phrase",
-					"tie_breaker": 0.3,
-					"operator": "AND",
-					"zero_terms_query": "all",
-					"slop": 2,
-					"query": "test",
-					"fields": []string{"title","body"},
+					"boost": 6.4,
+					"fuzziness": "AUTO",
+					"fuzzy_rewrite": "scoring_boolean",
+					"lenient": true,
+					"max_expansions": 16,
+					"minimum_should_match": "3<90%",
+					"prefix_length": 12,
+					"transpositions": true,
+					"type": "phrase",
+					"tie_breaker": 0.3,
+					"operator": "AND",
+					"zero_terms_query": "all",
+					"slop": 2,
+					"query": "test",
+					"fields": []string{"title", "body"},
 				},
 			},
 		},
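
Note (sketch, not part of the diff): MultiMatchQuery.Map() appears in the hunk header above, so the query can be serialized directly. This mirrors the "simple multi_match" test case; per that expectation, only the last value passed to MultiMatch ends up under "query". The function name exampleMultiMatch is invented for illustration.

package esquery

import (
	"encoding/json"
	"fmt"
)

// exampleMultiMatch mirrors the "simple multi_match" test case above.
func exampleMultiMatch() {
	q := MultiMatch("value1", "value2").Fields("title")

	// Should print roughly (keys sorted by json.Marshal):
	// {"multi_match":{"fields":["title"],"query":"value2"}}
	b, _ := json.Marshal(q.Map())
	fmt.Println(string(b))
}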