d1353e1f7c
* update code.gitea.io/sdk/gitea v0.13.1 -> v0.13.2 * update github.com/go-swagger/go-swagger v0.25.0 -> v0.26.0 * update github.com/google/uuid v1.1.2 -> v1.2.0 * update github.com/klauspost/compress v1.11.3 -> v1.11.7 * update github.com/lib/pq 083382b7e6fc -> v1.9.0 * update github.com/markbates/goth v1.65.0 -> v1.66.1 * update github.com/mattn/go-sqlite3 v1.14.4 -> v1.14.6 * update github.com/mgechev/revive 246eac737dc7 -> v1.0.3 * update github.com/minio/minio-go/v7 v7.0.6 -> v7.0.7 * update github.com/niklasfasching/go-org v1.3.2 -> v1.4.0 * update github.com/olivere/elastic/v7 v7.0.21 -> v7.0.22 * update github.com/pquerna/otp v1.2.0 -> v1.3.0 * update github.com/xanzy/go-gitlab v0.39.0 -> v0.42.0 * update github.com/yuin/goldmark v1.2.1 -> v1.3.1
718 lines
25 KiB
Go
Vendored
718 lines
25 KiB
Go
Vendored
// Copyright 2012-present Oliver Eilhard. All rights reserved.
|
|
// Use of this source code is governed by a MIT-license.
|
|
// See http://olivere.mit-license.org/license.txt for details.
|
|
|
|
package elastic
|
|
|
|
import (
|
|
"context"
|
|
"encoding/json"
|
|
"fmt"
|
|
"net/http"
|
|
"net/url"
|
|
"reflect"
|
|
"strings"
|
|
|
|
"github.com/olivere/elastic/v7/uritemplates"
|
|
)
|
|
|
|
// XPackAsyncSearchSubmit is an XPack API for asynchronously
// searching for documents in Elasticsearch.
//
// All setters return the service itself so calls can be chained.
// The collected options are serialized into query-string parameters
// by buildURL and into the request body by Do.
//
// For more details, see the documentation at
// https://www.elastic.co/guide/en/elasticsearch/reference/7.9/async-search.html
type XPackAsyncSearchSubmit struct {
	client *Client // client used to perform the HTTP request

	pretty     *bool       // pretty format the returned JSON response
	human      *bool       // return human readable values for statistics
	errorTrace *bool       // include the stack trace of returned errors
	filterPath []string    // list of filters used to reduce the response
	headers    http.Header // custom request-level HTTP headers

	searchSource *SearchSource // q
	source       interface{}   // manual request body; takes precedence over searchSource in Do when non-nil
	searchType   string        // search_type
	index        []string      // indices to search
	typ          []string      // document types (deprecated in Elasticsearch 7.x)
	routing      string        // routing
	preference   string        // preference
	requestCache *bool         // request_cache

	ignoreUnavailable         *bool  // ignore_unavailable
	ignoreThrottled           *bool  // ignore_throttled
	allowNoIndices            *bool  // allow_no_indices
	expandWildcards           string // expand_wildcards
	lenient                   *bool  // lenient
	maxResponseSize           int64  // upper limit on the accepted response body size (0 = no limit)
	allowPartialSearchResults *bool  // allow_partial_search_results
	typedKeys                 *bool  // typed_keys
	seqNoPrimaryTerm          *bool  // seq_no_primary_term
	batchedReduceSize         *int   // batched_reduce_size
	maxConcurrentShardRequests *int  // max_concurrent_shard_requests
	preFilterShardSize        *int   // pre_filter_shard_size
	restTotalHitsAsInt        *bool  // rest_total_hits_as_int

	ccsMinimizeRoundtrips *bool // ccs_minimize_roundtrips

	// Async-search specific options.
	waitForCompletionTimeout string // e.g. "1s"
	keepOnCompletion         *bool  // keep results after completion
	keepAlive                string // e.g. "1h"
}
|
|
|
|
// NewXPackAsyncSearchSubmit creates a new service for asynchronously
|
|
// searching in Elasticsearch.
|
|
func NewXPackAsyncSearchSubmit(client *Client) *XPackAsyncSearchSubmit {
|
|
builder := &XPackAsyncSearchSubmit{
|
|
client: client,
|
|
searchSource: NewSearchSource(),
|
|
}
|
|
return builder
|
|
}
|
|
|
|
// Pretty tells Elasticsearch whether to return a formatted JSON response.
func (s *XPackAsyncSearchSubmit) Pretty(pretty bool) *XPackAsyncSearchSubmit {
	s.pretty = &pretty
	return s
}

// Human specifies whether human readable values should be returned in
// the JSON response, e.g. "7.5mb".
func (s *XPackAsyncSearchSubmit) Human(human bool) *XPackAsyncSearchSubmit {
	s.human = &human
	return s
}

// ErrorTrace specifies whether to include the stack trace of returned errors.
func (s *XPackAsyncSearchSubmit) ErrorTrace(errorTrace bool) *XPackAsyncSearchSubmit {
	s.errorTrace = &errorTrace
	return s
}

// FilterPath specifies a list of filters used to reduce the response.
func (s *XPackAsyncSearchSubmit) FilterPath(filterPath ...string) *XPackAsyncSearchSubmit {
	s.filterPath = filterPath
	return s
}

// Header adds a header to the request. The header map is lazily
// initialized on first use.
func (s *XPackAsyncSearchSubmit) Header(name string, value string) *XPackAsyncSearchSubmit {
	if s.headers == nil {
		s.headers = http.Header{}
	}
	s.headers.Add(name, value)
	return s
}

// Headers specifies the headers of the request. Note that this replaces
// any headers previously added via Header.
func (s *XPackAsyncSearchSubmit) Headers(headers http.Header) *XPackAsyncSearchSubmit {
	s.headers = headers
	return s
}
|
|
|
|
// SearchSource sets the search source builder to use with this service.
// A nil searchSource is replaced by a fresh, empty SearchSource so the
// service always has a non-nil builder to delegate to.
func (s *XPackAsyncSearchSubmit) SearchSource(searchSource *SearchSource) *XPackAsyncSearchSubmit {
	s.searchSource = searchSource
	if s.searchSource == nil {
		s.searchSource = NewSearchSource()
	}
	return s
}

// Source allows the user to set the request body manually without using
// any of the structs and interfaces in Elastic. If set, it takes
// precedence over the SearchSource when the request is executed.
func (s *XPackAsyncSearchSubmit) Source(source interface{}) *XPackAsyncSearchSubmit {
	s.source = source
	return s
}

// Index sets the names of the indices to use for search.
// Indices given in successive calls accumulate.
func (s *XPackAsyncSearchSubmit) Index(index ...string) *XPackAsyncSearchSubmit {
	s.index = append(s.index, index...)
	return s
}

// Type adds search restrictions for a list of types.
//
// Deprecated: Types are in the process of being removed. Instead of using a type, prefer to
// filter on a field on the document.
func (s *XPackAsyncSearchSubmit) Type(typ ...string) *XPackAsyncSearchSubmit {
	s.typ = append(s.typ, typ...)
	return s
}

// Timeout sets the timeout to use, e.g. "1s" or "1000ms".
func (s *XPackAsyncSearchSubmit) Timeout(timeout string) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Timeout(timeout)
	return s
}

// Profile sets the Profile API flag on the search source.
// When enabled, a search executed by this service will return query
// profiling data.
func (s *XPackAsyncSearchSubmit) Profile(profile bool) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Profile(profile)
	return s
}

// Collapse adds field collapsing.
func (s *XPackAsyncSearchSubmit) Collapse(collapse *CollapseBuilder) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Collapse(collapse)
	return s
}

// TimeoutInMillis sets the timeout in milliseconds.
func (s *XPackAsyncSearchSubmit) TimeoutInMillis(timeoutInMillis int) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.TimeoutInMillis(timeoutInMillis)
	return s
}

// TerminateAfter specifies the maximum number of documents to collect for
// each shard, upon reaching which the query execution will terminate early.
func (s *XPackAsyncSearchSubmit) TerminateAfter(terminateAfter int) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.TerminateAfter(terminateAfter)
	return s
}

// SearchType sets the search operation type. Valid values are:
// "dfs_query_then_fetch" and "query_then_fetch".
// See https://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-request-search-type.html
// for details.
func (s *XPackAsyncSearchSubmit) SearchType(searchType string) *XPackAsyncSearchSubmit {
	s.searchType = searchType
	return s
}
|
|
|
|
// Routing is a list of specific routing values to control the shards
// the search will be executed on. The values are joined with commas
// into a single routing parameter.
func (s *XPackAsyncSearchSubmit) Routing(routings ...string) *XPackAsyncSearchSubmit {
	s.routing = strings.Join(routings, ",")
	return s
}

// Preference sets the preference to execute the search. Defaults to
// randomize across shards ("random"). Can be set to "_local" to prefer
// local shards, "_primary" to execute on primary shards only,
// or a custom value which guarantees that the same order will be used
// across different requests.
func (s *XPackAsyncSearchSubmit) Preference(preference string) *XPackAsyncSearchSubmit {
	s.preference = preference
	return s
}

// RequestCache indicates whether the cache should be used for this
// request or not, defaults to index level setting.
func (s *XPackAsyncSearchSubmit) RequestCache(requestCache bool) *XPackAsyncSearchSubmit {
	s.requestCache = &requestCache
	return s
}
|
|
|
|
// The following setters all delegate to the underlying SearchSource,
// which is serialized into the request body by Do.

// Query sets the query to perform, e.g. MatchAllQuery.
func (s *XPackAsyncSearchSubmit) Query(query Query) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Query(query)
	return s
}

// PostFilter will be executed after the query has been executed and
// only affects the search hits, not the aggregations.
// This filter is always executed as the last filtering mechanism.
func (s *XPackAsyncSearchSubmit) PostFilter(postFilter Query) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.PostFilter(postFilter)
	return s
}

// FetchSource indicates whether the response should contain the stored
// _source for every hit.
func (s *XPackAsyncSearchSubmit) FetchSource(fetchSource bool) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.FetchSource(fetchSource)
	return s
}

// FetchSourceContext indicates how the _source should be fetched.
func (s *XPackAsyncSearchSubmit) FetchSourceContext(fetchSourceContext *FetchSourceContext) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.FetchSourceContext(fetchSourceContext)
	return s
}

// Highlight adds highlighting to the search.
func (s *XPackAsyncSearchSubmit) Highlight(highlight *Highlight) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Highlight(highlight)
	return s
}

// GlobalSuggestText defines the global text to use with all suggesters.
// This avoids repetition.
func (s *XPackAsyncSearchSubmit) GlobalSuggestText(globalText string) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.GlobalSuggestText(globalText)
	return s
}

// Suggester adds a suggester to the search.
func (s *XPackAsyncSearchSubmit) Suggester(suggester Suggester) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Suggester(suggester)
	return s
}

// Aggregation adds an aggregation to perform as part of the search.
func (s *XPackAsyncSearchSubmit) Aggregation(name string, aggregation Aggregation) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Aggregation(name, aggregation)
	return s
}

// MinScore sets the minimum score below which docs will be filtered out.
func (s *XPackAsyncSearchSubmit) MinScore(minScore float64) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.MinScore(minScore)
	return s
}

// From index to start the search from. Defaults to 0.
func (s *XPackAsyncSearchSubmit) From(from int) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.From(from)
	return s
}

// Size is the number of search hits to return. Defaults to 10.
func (s *XPackAsyncSearchSubmit) Size(size int) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Size(size)
	return s
}

// Explain indicates whether each search hit should be returned with
// an explanation of the hit (ranking).
func (s *XPackAsyncSearchSubmit) Explain(explain bool) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Explain(explain)
	return s
}

// Version indicates whether each search hit should be returned with
// a version associated to it.
func (s *XPackAsyncSearchSubmit) Version(version bool) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Version(version)
	return s
}

// Sort adds a sort order.
func (s *XPackAsyncSearchSubmit) Sort(field string, ascending bool) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Sort(field, ascending)
	return s
}

// SortWithInfo adds a sort order.
func (s *XPackAsyncSearchSubmit) SortWithInfo(info SortInfo) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.SortWithInfo(info)
	return s
}

// SortBy adds a sort order.
func (s *XPackAsyncSearchSubmit) SortBy(sorter ...Sorter) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.SortBy(sorter...)
	return s
}

// DocvalueField adds a single field to load from the field data cache
// and return as part of the search.
func (s *XPackAsyncSearchSubmit) DocvalueField(docvalueField string) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.DocvalueField(docvalueField)
	return s
}

// DocvalueFieldWithFormat adds a single field to load from the field data cache
// and return as part of the search.
func (s *XPackAsyncSearchSubmit) DocvalueFieldWithFormat(docvalueField DocvalueField) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.DocvalueFieldWithFormat(docvalueField)
	return s
}

// DocvalueFields adds one or more fields to load from the field data cache
// and return as part of the search.
func (s *XPackAsyncSearchSubmit) DocvalueFields(docvalueFields ...string) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.DocvalueFields(docvalueFields...)
	return s
}

// DocvalueFieldsWithFormat adds one or more fields to load from the field data cache
// and return as part of the search.
func (s *XPackAsyncSearchSubmit) DocvalueFieldsWithFormat(docvalueFields ...DocvalueField) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.DocvalueFieldsWithFormat(docvalueFields...)
	return s
}

// NoStoredFields indicates that no stored fields should be loaded, resulting in only
// id and type to be returned per field.
func (s *XPackAsyncSearchSubmit) NoStoredFields() *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.NoStoredFields()
	return s
}

// StoredField adds a single field to load and return (note, must be stored) as
// part of the search request. If none are specified, the source of the
// document will be returned.
func (s *XPackAsyncSearchSubmit) StoredField(fieldName string) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.StoredField(fieldName)
	return s
}

// StoredFields sets the fields to load and return as part of the search request.
// If none are specified, the source of the document will be returned.
func (s *XPackAsyncSearchSubmit) StoredFields(fields ...string) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.StoredFields(fields...)
	return s
}

// TrackScores is applied when sorting and controls if scores will be
// tracked as well. Defaults to false.
func (s *XPackAsyncSearchSubmit) TrackScores(trackScores bool) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.TrackScores(trackScores)
	return s
}

// TrackTotalHits controls if the total hit count for the query should be tracked.
//
// See https://www.elastic.co/guide/en/elasticsearch/reference/7.1/search-request-track-total-hits.html
// for details.
func (s *XPackAsyncSearchSubmit) TrackTotalHits(trackTotalHits interface{}) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.TrackTotalHits(trackTotalHits)
	return s
}

// SearchAfter allows a different form of pagination by using a live cursor,
// using the results of the previous page to help the retrieval of the next.
//
// See https://www.elastic.co/guide/en/elasticsearch/reference/7.0/search-request-search-after.html
func (s *XPackAsyncSearchSubmit) SearchAfter(sortValues ...interface{}) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.SearchAfter(sortValues...)
	return s
}

// DefaultRescoreWindowSize sets the rescore window size for rescores
// that don't specify their window.
func (s *XPackAsyncSearchSubmit) DefaultRescoreWindowSize(defaultRescoreWindowSize int) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.DefaultRescoreWindowSize(defaultRescoreWindowSize)
	return s
}

// Rescorer adds a rescorer to the search.
func (s *XPackAsyncSearchSubmit) Rescorer(rescore *Rescore) *XPackAsyncSearchSubmit {
	s.searchSource = s.searchSource.Rescorer(rescore)
	return s
}
|
|
|
|
// The following setters configure query-string parameters of the
// request; they are serialized by buildURL rather than the search body.

// IgnoreUnavailable indicates whether the specified concrete indices
// should be ignored when unavailable (missing or closed).
func (s *XPackAsyncSearchSubmit) IgnoreUnavailable(ignoreUnavailable bool) *XPackAsyncSearchSubmit {
	s.ignoreUnavailable = &ignoreUnavailable
	return s
}

// IgnoreThrottled indicates whether specified concrete, expanded or aliased
// indices should be ignored when throttled.
func (s *XPackAsyncSearchSubmit) IgnoreThrottled(ignoreThrottled bool) *XPackAsyncSearchSubmit {
	s.ignoreThrottled = &ignoreThrottled
	return s
}

// AllowNoIndices indicates whether to ignore if a wildcard indices
// expression resolves into no concrete indices. (This includes `_all` string
// or when no indices have been specified).
func (s *XPackAsyncSearchSubmit) AllowNoIndices(allowNoIndices bool) *XPackAsyncSearchSubmit {
	s.allowNoIndices = &allowNoIndices
	return s
}

// ExpandWildcards indicates whether to expand wildcard expression to
// concrete indices that are open, closed or both.
func (s *XPackAsyncSearchSubmit) ExpandWildcards(expandWildcards string) *XPackAsyncSearchSubmit {
	s.expandWildcards = expandWildcards
	return s
}

// Lenient specifies whether format-based query failures (such as providing
// text to a numeric field) should be ignored.
func (s *XPackAsyncSearchSubmit) Lenient(lenient bool) *XPackAsyncSearchSubmit {
	s.lenient = &lenient
	return s
}

// MaxResponseSize sets an upper limit on the response body size that we accept,
// to guard against OOM situations.
func (s *XPackAsyncSearchSubmit) MaxResponseSize(maxResponseSize int64) *XPackAsyncSearchSubmit {
	s.maxResponseSize = maxResponseSize
	return s
}

// AllowPartialSearchResults indicates if an error should be returned if
// there is a partial search failure or timeout.
func (s *XPackAsyncSearchSubmit) AllowPartialSearchResults(enabled bool) *XPackAsyncSearchSubmit {
	s.allowPartialSearchResults = &enabled
	return s
}

// TypedKeys specifies whether aggregation and suggester names should be
// prefixed by their respective types in the response.
func (s *XPackAsyncSearchSubmit) TypedKeys(enabled bool) *XPackAsyncSearchSubmit {
	s.typedKeys = &enabled
	return s
}

// SeqNoPrimaryTerm specifies whether to return sequence number and
// primary term of the last modification of each hit.
func (s *XPackAsyncSearchSubmit) SeqNoPrimaryTerm(enabled bool) *XPackAsyncSearchSubmit {
	s.seqNoPrimaryTerm = &enabled
	return s
}

// BatchedReduceSize specifies the number of shard results that should be reduced
// at once on the coordinating node. This value should be used as a protection
// mechanism to reduce the memory overhead per search request if the potential
// number of shards in the request can be large.
func (s *XPackAsyncSearchSubmit) BatchedReduceSize(size int) *XPackAsyncSearchSubmit {
	s.batchedReduceSize = &size
	return s
}

// MaxConcurrentShardRequests specifies the number of concurrent shard requests
// this search executes concurrently. This value should be used to limit the
// impact of the search on the cluster in order to limit the number of
// concurrent shard requests.
func (s *XPackAsyncSearchSubmit) MaxConcurrentShardRequests(max int) *XPackAsyncSearchSubmit {
	s.maxConcurrentShardRequests = &max
	return s
}

// PreFilterShardSize specifies a threshold that enforces a pre-filter roundtrip
// to prefilter search shards based on query rewriting if the number of shards
// the search request expands to exceeds the threshold. This filter roundtrip
// can limit the number of shards significantly if for instance a shard can
// not match any documents based on it's rewrite method i.e. if date filters are
// mandatory to match but the shard bounds and the query are disjoint.
func (s *XPackAsyncSearchSubmit) PreFilterShardSize(threshold int) *XPackAsyncSearchSubmit {
	s.preFilterShardSize = &threshold
	return s
}

// RestTotalHitsAsInt indicates whether hits.total should be rendered as an
// integer or an object in the rest search response.
func (s *XPackAsyncSearchSubmit) RestTotalHitsAsInt(enabled bool) *XPackAsyncSearchSubmit {
	s.restTotalHitsAsInt = &enabled
	return s
}

// CCSMinimizeRoundtrips indicates whether network round-trips should be minimized
// as part of cross-cluster search requests execution.
func (s *XPackAsyncSearchSubmit) CCSMinimizeRoundtrips(enabled bool) *XPackAsyncSearchSubmit {
	s.ccsMinimizeRoundtrips = &enabled
	return s
}

// WaitForCompletionTimeout specifies the
// timeout for the Search to wait for completion before returning an ID to
// return the results asynchronously. In other words: If the search takes
// longer than this value (default is 1 second), then you need to call
// GetAsync to retrieve its final results.
func (s *XPackAsyncSearchSubmit) WaitForCompletionTimeout(timeout string) *XPackAsyncSearchSubmit {
	s.waitForCompletionTimeout = timeout
	return s
}

// KeepOnCompletion indicates whether the
// asynchronous search ID and its results should be kept even after the
// search (and its results) are completed and retrieved.
func (s *XPackAsyncSearchSubmit) KeepOnCompletion(keepOnCompletion bool) *XPackAsyncSearchSubmit {
	s.keepOnCompletion = &keepOnCompletion
	return s
}

// KeepAlive specifies the
// duration after which search ID and its results are removed from the
// Elasticsearch cluster and hence can no longer be retrieved with GetAsync.
func (s *XPackAsyncSearchSubmit) KeepAlive(keepAlive string) *XPackAsyncSearchSubmit {
	s.keepAlive = keepAlive
	return s
}
|
|
|
|
// buildURL builds the URL for the operation. It returns the endpoint
// path (built from the configured indices and — deprecated — types)
// and the query-string parameters derived from the configured options.
// Only options that were explicitly set are emitted as parameters.
func (s *XPackAsyncSearchSubmit) buildURL() (string, url.Values, error) {
	var err error
	var path string

	// Pick the endpoint path depending on which of index/type are set.
	// The type-based variants exist for backwards compatibility only.
	if len(s.index) > 0 && len(s.typ) > 0 {
		path, err = uritemplates.Expand("/{index}/{type}/_async_search", map[string]string{
			"index": strings.Join(s.index, ","),
			"type":  strings.Join(s.typ, ","),
		})
	} else if len(s.index) > 0 {
		path, err = uritemplates.Expand("/{index}/_async_search", map[string]string{
			"index": strings.Join(s.index, ","),
		})
	} else if len(s.typ) > 0 {
		path, err = uritemplates.Expand("/_all/{type}/_async_search", map[string]string{
			"type": strings.Join(s.typ, ","),
		})
	} else {
		path = "/_async_search"
	}
	if err != nil {
		return "", url.Values{}, err
	}

	// Add query string parameters
	params := url.Values{}
	if v := s.pretty; v != nil {
		params.Set("pretty", fmt.Sprint(*v))
	}
	if v := s.human; v != nil {
		params.Set("human", fmt.Sprint(*v))
	}
	if v := s.errorTrace; v != nil {
		params.Set("error_trace", fmt.Sprint(*v))
	}
	if len(s.filterPath) > 0 {
		params.Set("filter_path", strings.Join(s.filterPath, ","))
	}
	if s.searchType != "" {
		params.Set("search_type", s.searchType)
	}
	if s.routing != "" {
		params.Set("routing", s.routing)
	}
	if s.preference != "" {
		params.Set("preference", s.preference)
	}
	if v := s.requestCache; v != nil {
		params.Set("request_cache", fmt.Sprint(*v))
	}
	if v := s.allowNoIndices; v != nil {
		params.Set("allow_no_indices", fmt.Sprint(*v))
	}
	if s.expandWildcards != "" {
		params.Set("expand_wildcards", s.expandWildcards)
	}
	if v := s.lenient; v != nil {
		params.Set("lenient", fmt.Sprint(*v))
	}
	if v := s.ignoreUnavailable; v != nil {
		params.Set("ignore_unavailable", fmt.Sprint(*v))
	}
	if v := s.ignoreThrottled; v != nil {
		params.Set("ignore_throttled", fmt.Sprint(*v))
	}
	if s.seqNoPrimaryTerm != nil {
		params.Set("seq_no_primary_term", fmt.Sprint(*s.seqNoPrimaryTerm))
	}
	if v := s.allowPartialSearchResults; v != nil {
		params.Set("allow_partial_search_results", fmt.Sprint(*v))
	}
	if v := s.typedKeys; v != nil {
		params.Set("typed_keys", fmt.Sprint(*v))
	}
	if v := s.batchedReduceSize; v != nil {
		params.Set("batched_reduce_size", fmt.Sprint(*v))
	}
	if v := s.maxConcurrentShardRequests; v != nil {
		params.Set("max_concurrent_shard_requests", fmt.Sprint(*v))
	}
	if v := s.preFilterShardSize; v != nil {
		params.Set("pre_filter_shard_size", fmt.Sprint(*v))
	}
	if v := s.restTotalHitsAsInt; v != nil {
		params.Set("rest_total_hits_as_int", fmt.Sprint(*v))
	}
	if v := s.ccsMinimizeRoundtrips; v != nil {
		params.Set("ccs_minimize_roundtrips", fmt.Sprint(*v))
	}
	// Async-search specific parameters.
	if s.waitForCompletionTimeout != "" {
		params.Set("wait_for_completion_timeout", s.waitForCompletionTimeout)
	}
	if v := s.keepOnCompletion; v != nil {
		params.Set("keep_on_completion", fmt.Sprint(*v))
	}
	if s.keepAlive != "" {
		params.Set("keep_alive", s.keepAlive)
	}
	return path, params, nil
}
|
|
|
|
// Validate checks if the operation is valid.
// Async search submit has no required parameters (indices and the
// search body are all optional), so this always returns nil. It is
// kept for consistency with the other services in this package.
func (s *XPackAsyncSearchSubmit) Validate() error {
	return nil
}
|
|
|
|
// Do executes the search and returns a XPackAsyncSearchResult.
|
|
func (s *XPackAsyncSearchSubmit) Do(ctx context.Context) (*XPackAsyncSearchResult, error) {
|
|
// Check pre-conditions
|
|
if err := s.Validate(); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
// Get URL for request
|
|
path, params, err := s.buildURL()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
// Perform request
|
|
var body interface{}
|
|
if s.source != nil {
|
|
body = s.source
|
|
} else {
|
|
src, err := s.searchSource.Source()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
body = src
|
|
}
|
|
res, err := s.client.PerformRequest(ctx, PerformRequestOptions{
|
|
Method: "POST",
|
|
Path: path,
|
|
Params: params,
|
|
Body: body,
|
|
Headers: s.headers,
|
|
MaxResponseSize: s.maxResponseSize,
|
|
})
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
// Return search results
|
|
ret := new(XPackAsyncSearchResult)
|
|
if err := s.client.decoder.Decode(res.Body, ret); err != nil {
|
|
ret.Header = res.Header
|
|
return nil, err
|
|
}
|
|
ret.Header = res.Header
|
|
return ret, nil
|
|
}
|
|
|
|
// XPackAsyncSearchResult is the outcome of starting an asynchronous search
// or retrieving a search result with XPackAsyncSearchGet.
type XPackAsyncSearchResult struct {
	Header http.Header `json:"-"` // HTTP response headers; filled in by the service, not decoded from JSON
	// ID identifies the async search so its results can be fetched later.
	ID string `json:"id,omitempty"`
	// IsRunning reports whether the search is still executing.
	IsRunning bool `json:"is_running"`
	// IsPartial reports whether Response contains only partial results so far.
	IsPartial bool `json:"is_partial"`
	// StartTimeMillis and ExpirationTimeMillis are epoch timestamps in milliseconds.
	StartTimeMillis      int64         `json:"start_time_in_millis,omitempty"`
	ExpirationTimeMillis int64         `json:"expiration_time_in_millis,omitempty"`
	Response             *SearchResult `json:"response,omitempty"`
	Error                *ErrorDetails `json:"error,omitempty"`
}
|
|
|
|
// Each is a utility function to iterate over all hits. It saves you from
|
|
// checking for nil values. Notice that Each will ignore errors in
|
|
// serializing JSON and hits with empty/nil _source will get an empty
|
|
// value
|
|
func (r *XPackAsyncSearchResult) Each(typ reflect.Type) []interface{} {
|
|
if r == nil || r.Response == nil || r.Response.Hits == nil || r.Response.Hits.Hits == nil || len(r.Response.Hits.Hits) == 0 {
|
|
return nil
|
|
}
|
|
var slice []interface{}
|
|
for _, hit := range r.Response.Hits.Hits {
|
|
v := reflect.New(typ).Elem()
|
|
if hit.Source == nil {
|
|
slice = append(slice, v.Interface())
|
|
continue
|
|
}
|
|
if err := json.Unmarshal(hit.Source, v.Addr().Interface()); err == nil {
|
|
slice = append(slice, v.Interface())
|
|
}
|
|
}
|
|
return slice
|
|
}
|