search-collapse #6
@@ -1,16 +1,16 @@
 package esquery
 
 type FilterAggregation struct {
 	name string
 	filter Mappable
 	aggs []Aggregation
 }
 
 // Filter creates a new aggregation of type "filter". The method name includes
 // the "Agg" suffix to prevent conflict with the "filter" query.
 func FilterAgg(name string, filter Mappable) *FilterAggregation {
 	return &FilterAggregation{
 		name: name,
 		filter: filter,
 	}
 }
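A minimal usage sketch of the FilterAgg builder from the hunk above (illustrative, not part of the change set). It assumes the aggregation types expose the Map() method that the runMapTests helper in the test hunks below exercises, and the import path is only an assumption.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/aquasecurity/esquery" // assumed module path, for illustration only
)

func main() {
	// Build a "filter" aggregation wrapping a term query, with an avg sub-aggregation,
	// mirroring the "filter agg: with aggs" test case in this PR.
	agg := esquery.FilterAgg("filtered", esquery.Term("type", "t-shirt")).
		Aggs(esquery.Avg("avg_price", "price"))

	// Map() returns the map[string]interface{} body; marshal it to inspect the JSON
	// that would be sent to Elasticsearch.
	body, _ := json.MarshalIndent(agg.Map(), "", "  ")
	fmt.Println(string(body))
}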
@@ -6,11 +6,11 @@ func TestFilterAggs(t *testing.T) {
 	runMapTests(t, []mapTest{
 		{
 			"filter agg: simple",
-			FilterAgg("filtered", Term("type","t-shirt")),
+			FilterAgg("filtered", Term("type", "t-shirt")),
 			map[string]interface{}{
 				"filter": map[string]interface{}{
-					"term": map[string]interface{} {
-						"type": map[string]interface{} {
+					"term": map[string]interface{}{
+						"type": map[string]interface{}{
 							"value": "t-shirt",
 						},
 					},
@@ -19,18 +19,18 @@ func TestFilterAggs(t *testing.T) {
 		},
 		{
 			"filter agg: with aggs",
-			FilterAgg("filtered", Term("type","t-shirt")).
-				Aggs(Avg("avg_price","price")),
+			FilterAgg("filtered", Term("type", "t-shirt")).
+				Aggs(Avg("avg_price", "price")),
 			map[string]interface{}{
 				"filter": map[string]interface{}{
-					"term": map[string]interface{} {
-						"type": map[string]interface{} {
+					"term": map[string]interface{}{
+						"type": map[string]interface{}{
 							"value": "t-shirt",
 						},
 					},
 				},
-				"aggs": map[string]interface{} {
-					"avg_price": map[string]interface{} {
+				"aggs": map[string]interface{}{
+					"avg_price": map[string]interface{}{
 						"avg": map[string]interface{}{
 							"field": "price",
 						},
@@ -1,16 +1,16 @@
 package esquery
 
 type NestedAggregation struct {
 	name string
 	path string
 	aggs []Aggregation
 }
 
 // NestedAgg creates a new aggregation of type "nested". The method name includes
 // the "Agg" suffix to prevent conflict with the "nested" query.
 func NestedAgg(name string, path string) *NestedAggregation {
 	return &NestedAggregation{
 		name: name,
 		path: path,
 	}
 }
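For context, a sketch of how the nested aggregation above could be composed into a full search body. It continues the illustrative setup from the FilterAgg sketch earlier; Search(), its Aggs() method, and JSON marshaling of the request are assumptions about the rest of the package and are not part of this diff.

// Illustrative only; Search() and SearchRequest.Aggs() are assumed to exist elsewhere in the package.
req := esquery.Search().
	Aggs(
		esquery.NestedAgg("more_nested", "authors").
			Aggs(esquery.TermsAgg("authors", "name")),
	)

// Marshaling the request is assumed to produce the search body, including the
// nested aggregation under "aggs", as the test hunk below checks for Map().
body, _ := json.Marshal(req)
fmt.Println(string(body))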
@@ -16,14 +16,14 @@ func TestNestedAggs(t *testing.T) {
 		{
 			"nested agg: with aggs",
 			NestedAgg("more_nested", "authors").
-				Aggs(TermsAgg("authors","name")),
+				Aggs(TermsAgg("authors", "name")),
 			map[string]interface{}{
 				"nested": map[string]interface{}{
 					"path": "authors",
 				},
-				"aggs": map[string]interface{} {
-					"authors": map[string]interface{} {
-						"terms": map[string]interface{} {
+				"aggs": map[string]interface{}{
+					"authors": map[string]interface{}{
+						"terms": map[string]interface{}{
 							"field": "name",
 						},
 					},
@@ -22,7 +22,7 @@ func (q *QueryHighlight) Map() map[string]interface{} {
 }
 
 type QueryHighlight struct {
 	highlightQuery Mappable `structs:"highlight_query,omitempty"`
 	fields map[string]*QueryHighlight `structs:"fields"`
 	params highlighParams
 }
@@ -64,13 +64,13 @@ func newHighlight() *QueryHighlight {
 
 // PreTags sets the highlight query's pre_tags ignore unmapped field
 func (q *QueryHighlight) PreTags(s ...string) *QueryHighlight {
-	q.params.PreTags = append(q.params.PreTags,s...)
+	q.params.PreTags = append(q.params.PreTags, s...)
 	return q
 }
 
 // PostTags sets the highlight query's post_tags ignore unmapped field
 func (q *QueryHighlight) PostTags(s ...string) *QueryHighlight {
-	q.params.PostTags = append(q.params.PostTags,s...)
+	q.params.PostTags = append(q.params.PostTags, s...)
 	return q
 }
 
@@ -166,7 +166,7 @@ func (q *QueryHighlight) HighlightQuery(b Mappable) *QueryHighlight {
 
 // MatchedFields sets the highlight query's matched_fields ignore unmapped field
 func (q *QueryHighlight) MatchedFields(s ...string) *QueryHighlight {
-	q.params.MatchedFields = append(q.params.MatchedFields,s...)
+	q.params.MatchedFields = append(q.params.MatchedFields, s...)
 	return q
 }
 
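A small usage sketch of the highlight tag setters fixed above (illustrative, continuing the imports from the earlier sketch). QueryHighlight.Map() is shown in the hunk header above; the resulting keys mirror the expected maps in the highlight tests below.

// Wrap matched terms in <em> tags and highlight the "content" field.
// Map() yields a body with "pre_tags", "post_tags" and "fields": {"content": {}}.
hl := esquery.Highlight().
	PreTags("<em>").
	PostTags("</em>").
	Field("content")
fmt.Println(hl.Map())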
@@ -10,7 +10,7 @@ func TestHighlight(t *testing.T) {
 			"simple highlight",
 			Highlight().Field("content"),
 			map[string]interface{}{
-				"fields": map[string]interface{} {
+				"fields": map[string]interface{}{
 					"content": map[string]interface{}{},
 				},
 			},
@@ -18,69 +18,69 @@ func TestHighlight(t *testing.T) {
 		{
 			"highlight all params",
 			Highlight().
-				PreTags("<pre>","<code>").
-				PostTags("</code>","</pre>").
+				PreTags("<pre>", "<code>").
+				PostTags("</code>", "</pre>").
 				Field("content",
 					Highlight().
 						BoundaryChars(".;,")).
 				FragmentSize(150).
 				NumberOfFragments(4).
 				Type(HighlighterPlain).
 				BoundaryChars("()[]").
 				BoundaryMaxScan(32).
 				BoundaryScanner(BoundaryScannerChars).
 				BoundaryScannerLocale("en-US").
 				Encoder(EncoderHtml).
 				ForceSource(true).
 				Fragmenter(FragmenterSimple).
 				FragmentOffset(6).
 				HighlightQuery(
 					Bool().
 						Must(
 							Match("author").
 								Query("some guy").
 								Analyzer("analyzer?").
 								Fuzziness("fuzz"))).
-				MatchedFields("title","body").
+				MatchedFields("title", "body").
 				NoMatchSize(64).
 				Order(OrderScore).
 				PhraseLimit(512).
 				RequireFieldMatch(false).
 				TagsSchema(TagsSchemaStyled),
 			map[string]interface{}{
-				"pre_tags": []string{"<pre>","<code>"},
-				"post_tags": []string{"</code>","</pre>"},
+				"pre_tags": []string{"<pre>", "<code>"},
+				"post_tags": []string{"</code>", "</pre>"},
 				"fragment_size": 150,
 				"number_of_fragments": 4,
 				"type": "plain",
 				"boundary_chars": "()[]",
 				"boundary_scanner": "chars",
 				"boundary_max_scan": 32,
 				"boundary_scanner_locale": "en-US",
 				"encoder": "html",
 				"force_source": true,
 				"fragment_offset": 6,
 				"fragmenter": "simple",
-				"matched_fields": []string{"title","body"},
+				"matched_fields": []string{"title", "body"},
 				"no_match_size": 64,
 				"order": "score",
 				"phrase_limit": 512,
 				"require_field_match": false,
 				"tags_schema": "styled",
 				"fields": map[string]interface{}{
 					"content": map[string]interface{}{
 						"boundary_chars": ".;,",
 					},
 				},
-				"query": map[string]interface{} {
-					"bool": map[string]interface{} {
-						"must": []map[string]interface{} {
+				"query": map[string]interface{}{
+					"bool": map[string]interface{}{
+						"must": []map[string]interface{}{
 							{
-								"match": map[string]interface{} {
-									"author": map[string]interface{} {
+								"match": map[string]interface{}{
+									"author": map[string]interface{}{
 										"analyzer": "analyzer?",
 										"fuzziness": "fuzz",
 										"query": "some guy",
 									},
 								},
 							},
@@ -17,23 +17,23 @@ func (q *MultiMatchQuery) Map() map[string]interface{} {
 }
 
 type multiMatchParams struct {
 	Qry interface{} `structs:"query"`
 	Fields []string `structs:"fields"`
 	Type MultiMatchType `structs:"type,string,omitempty"`
 	TieBrk float32 `structs:"tie_breaker,omitempty"`
 	Boost float32 `structs:"boost,omitempty"`
 	Anl string `structs:"analyzer,omitempty"`
 	AutoGenerate *bool `structs:"auto_generate_synonyms_phrase_query,omitempty"`
 	Fuzz string `structs:"fuzziness,omitempty"`
 	MaxExp uint16 `structs:"max_expansions,omitempty"`
 	PrefLen uint16 `structs:"prefix_length,omitempty"`
 	Trans *bool `structs:"transpositions,omitempty"`
 	FuzzyRw string `structs:"fuzzy_rewrite,omitempty"`
 	Lent *bool `structs:"lenient,omitempty"`
 	Op MatchOperator `structs:"operator,string,omitempty"`
 	MinMatch string `structs:"minimum_should_match,omitempty"`
 	ZeroTerms ZeroTerms `structs:"zero_terms_query,string,omitempty"`
 	Slp uint16 `structs:"slop,omitempty"`
 }
 
 // MultiMatch creates a new query of type "multi_match"
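For reference, a short sketch of the multi_match builder whose params struct appears above (illustrative, continuing the earlier setup). MultiMatchQuery.Map() is shown in the hunk header; the field and operator setters are grounded in the test hunk below.

// Search two fields for the same text with an AND operator.
// Map() returns the {"multi_match": {...}} body that the tests below compare against.
q := esquery.MultiMatch("golang elasticsearch").
	Fields("title", "body").
	Operator(esquery.OperatorAnd)
fmt.Println(q.Map())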
@@ -8,53 +8,53 @@ func TestMultiMatch(t *testing.T) {
 	runMapTests(t, []mapTest{
 		{
 			"simple multi_match",
-			MultiMatch("value1","value2").Fields("title"),
+			MultiMatch("value1", "value2").Fields("title"),
 			map[string]interface{}{
 				"multi_match": map[string]interface{}{
 					"fields": []string{"title"},
 					"query": "value2",
 				},
 			},
 		},
 		{
 			"multi_match all params",
 			MultiMatch("original").
 				Query("test").
 				Analyzer("stop").
-				Fields("title","body").
+				Fields("title", "body").
 				AutoGenerateSynonymsPhraseQuery(true).
 				Fuzziness("AUTO").
 				MaxExpansions(16).
 				PrefixLength(12).
 				TieBreaker(0.3).
 				Boost(6.4).
 				Transpositions(true).
 				FuzzyRewrite("scoring_boolean").
 				Lenient(true).
 				Operator(OperatorAnd).
 				Type(MatchTypePhrase).
 				MinimumShouldMatch("3<90%").
 				Slop(2).
 				ZeroTermsQuery(ZeroTermsAll),
 			map[string]interface{}{
 				"multi_match": map[string]interface{}{
 					"analyzer": "stop",
 					"auto_generate_synonyms_phrase_query": true,
 					"boost": 6.4,
 					"fuzziness": "AUTO",
 					"fuzzy_rewrite": "scoring_boolean",
 					"lenient": true,
 					"max_expansions": 16,
 					"minimum_should_match": "3<90%",
 					"prefix_length": 12,
 					"transpositions": true,
 					"type": "phrase",
 					"tie_breaker": 0.3,
 					"operator": "AND",
 					"zero_terms_query": "all",
 					"slop": 2,
 					"query": "test",
-					"fields": []string{"title","body"},
+					"fields": []string{"title", "body"},
 				},
 			},
 		},