# client.rb
  assert { !values.empty? }
  if values.instance_of?(Array) && values.size > 0
    values.each do |value|
      assert { value.instance_of? Fixnum }
    end
    @filters << { 'type' => SPH_FILTER_VALUES, 'attr' => attribute, 'exclude' => exclude, 'values' => values }
  end
end
# Set range filter.
#
# Only match those records where <tt>attribute</tt> column value
# is between <tt>min</tt> and <tt>max</tt> (including <tt>min</tt> and <tt>max</tt>).
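#
# Example (the attribute name 'year' is illustrative):
#   sphinx.SetFilterRange('year', 2000, 2010)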
def SetFilterRange(attribute, min, max, exclude = false)
  assert { attribute.instance_of? String }
  assert { min.instance_of? Fixnum }
  assert { max.instance_of? Fixnum }
  assert { min <= max }
  @filters << { 'type' => SPH_FILTER_RANGE, 'attr' => attribute, 'exclude' => exclude, 'min' => min, 'max' => max }
end
# Set float range filter.
#
# Only match those records where <tt>attribute</tt> column value
# is between <tt>min</tt> and <tt>max</tt> (including <tt>min</tt> and <tt>max</tt>).
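#
# Example (the attribute name 'price' is illustrative):
#   sphinx.SetFilterFloatRange('price', 9.99, 99.99)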
def SetFilterFloatRange(attribute, min, max, exclude = false)
  assert { attribute.instance_of? String }
  assert { min.instance_of? Float }
  assert { max.instance_of? Float }
  assert { min <= max }
  @filters << { 'type' => SPH_FILTER_FLOATRANGE, 'attr' => attribute, 'exclude' => exclude, 'min' => min, 'max' => max }
end
# Setup anchor point for geosphere distance calculations.
#
# Required to use <tt>@geodist</tt> in filters and sorting;
# distance will be computed to this point. Latitude and longitude
# must be in radians.
#
# * <tt>attrlat</tt> -- is the name of latitude attribute
# * <tt>attrlong</tt> -- is the name of longitude attribute
# * <tt>lat</tt> -- is anchor point latitude, in radians
# * <tt>long</tt> -- is anchor point longitude, in radians
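#
# Example (attribute names and coordinates are illustrative; degrees
# are converted to radians as the API requires):
#   sphinx.SetGeoAnchor('lat', 'long', 55.75 * Math::PI / 180, 37.62 * Math::PI / 180)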
def SetGeoAnchor(attrlat, attrlong, lat, long)
  assert { attrlat.instance_of? String }
  assert { attrlong.instance_of? String }
  assert { lat.instance_of? Float }
  assert { long.instance_of? Float }
  @anchor = { 'attrlat' => attrlat, 'attrlong' => attrlong, 'lat' => lat, 'long' => long }
end
# Set grouping attribute and function.
#
# In grouping mode, all matches are assigned to different groups
# based on grouping function value.
#
# Each group keeps track of the total match count, and the best match
# (in this group) according to current sorting function.
#
# The final result set contains one best match per group, with
# grouping function value and matches count attached.
#
# Groups in result set could be sorted by any sorting clause,
# including both document attributes and the following special
# internal Sphinx attributes:
#
# * @id - match document ID;
# * @weight, @rank, @relevance - match weight;
# * @group - groupby function value;
# * @count - amount of matches in group.
#
# The default mode is to sort by groupby value in descending order,
# i.e. by '@group desc'.
#
# 'total_found' contains the total amount of matching groups over
# the whole index.
#
# WARNING: grouping is done in fixed memory and thus its results
# are only approximate; so there might be more groups reported
# in total_found than actually present. @count might also
# be underestimated.
#
# For example, if sorting by relevance and grouping by "published"
# attribute with SPH_GROUPBY_DAY function, then the result set will
# contain one most relevant match per each day when there were any
# matches published, with day number and per-day match count attached,
# and sorted by day number in descending order (ie. recent days first).
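#
# Example (the attribute name 'published' is illustrative):
#   sphinx.SetGroupBy('published', SPH_GROUPBY_DAY)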
def SetGroupBy(attribute, func, groupsort = '@group desc')
  assert { attribute.instance_of? String }
  assert { groupsort.instance_of? String }
  assert { func == SPH_GROUPBY_DAY \
        || func == SPH_GROUPBY_WEEK \
        || func == SPH_GROUPBY_MONTH \
        || func == SPH_GROUPBY_YEAR \
        || func == SPH_GROUPBY_ATTR \
        || func == SPH_GROUPBY_ATTRPAIR }
  @groupby = attribute
  @groupfunc = func
  @groupsort = groupsort
end
# Set count-distinct attribute for group-by queries.
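#
# Example (the attribute name 'vendor' is illustrative):
#   sphinx.SetGroupDistinct('vendor')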
def SetGroupDistinct(attribute)
  assert { attribute.instance_of? String }
  @groupdistinct = attribute
end
# Set distributed retries count and delay.
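#
# Example (retry up to three times, keeping the default delay):
#   sphinx.SetRetries(3)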
def SetRetries(count, delay = 0)
  assert { count.instance_of? Fixnum }
  assert { delay.instance_of? Fixnum }
  @retrycount = count
  @retrydelay = delay
end
# Clear all filters (for multi-queries).
def ResetFilters
  @filters = []
  # empty Hash, matching the Hash set by SetGeoAnchor
  @anchor = {}
end
# Clear groupby settings (for multi-queries).
def ResetGroupBy
  @groupby = ''
  @groupfunc = SPH_GROUPBY_DAY
  @groupsort = '@group desc'
  @groupdistinct = ''
end
# Connect to searchd server and run given search query.
#
# <tt>query</tt> is query string
# <tt>index</tt> is index name (or names) to query. default value is "*" which means
# to query all indexes. Accepted characters for index names are letters, numbers,
# dash, and underscore; everything else is considered a separator. Therefore,
# all the following calls are valid and will search two indexes:
#
#   sphinx.Query('test query', 'main delta')
#   sphinx.Query('test query', 'main;delta')
#   sphinx.Query('test query', 'main, delta')
#
# Index order matters. If identical IDs are found in two or more indexes,
# weight and attribute values from the very last matching index will be used
# for sorting and returning to client. Therefore, in the example above,
# matches from "delta" index will always "win" over matches from "main".
#
# Returns false on failure.
# Returns hash which has the following keys on success:
#
# * <tt>'matches'</tt> -- array of hashes {'weight', 'group', 'id'}, where 'id' is document_id.
# * <tt>'total'</tt> -- total amount of matches retrieved (up to SPH_MAX_MATCHES, see sphinx.h)
# * <tt>'total_found'</tt> -- total amount of matching documents in index
# * <tt>'time'</tt> -- search time
# * <tt>'words'</tt> -- hash which maps query terms (stemmed!) to ('docs', 'hits') hash
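#
# Example (the index name 'main' is illustrative):
#   result = sphinx.Query('test query', 'main')
#   if result
#     result['matches'].each { |match| puts match['id'] }
#   end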
def Query(query, index = '*', comment = '')
  assert { @reqs.empty? }
  @reqs = []
  self.AddQuery(query, index, comment)
  results = self.RunQueries
  # probably network error; error message should be already filled
  return false unless results.instance_of?(Array)
  @error = results[0]['error']
  @warning = results[0]['warning']
  return false if results[0]['status'] == SEARCHD_ERROR
  return results[0]
end
# Add query to batch.
#
# Batch queries enable searchd to perform internal optimizations
# where possible, and reduce network connection overheads in all cases.
#
# For instance, running exactly the same query with different
# groupby settings will enable searchd to perform the expensive
# full-text search and ranking operation only once, but compute
# multiple groupby results from its output.
#
# Parameters are exactly the same as in <tt>Query</tt> call.
# Returns index to results array returned by <tt>RunQueries</tt> call.
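#
# Example of a two-query batch (index and attribute names are illustrative):
#   i1 = sphinx.AddQuery('test query', 'main')
#   sphinx.SetGroupBy('group_id', SPH_GROUPBY_ATTR)
#   i2 = sphinx.AddQuery('test query', 'main')
#   results = sphinx.RunQueries
#   puts results[i1]['total_found'], results[i2]['total_found'] if results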
def AddQuery(query, index = '*', comment = '')
  # build request
  # mode and limits
  request = Request.new
  request.put_int @offset, @limit, @mode, @ranker, @sort
  request.put_string @sortby
  # query itself
  request.put_string query
  # weights
  request.put_int_array @weights
  # indexes
  request.put_string index
  # id64 range marker
  request.put_int 1
  # id64 range
  request.put_int64 @min_id.to_i, @max_id.to_i
  # filters
  request.put_int @filters.length
  @filters.each do |filter|
    request.put_string filter['attr']
    request.put_int filter['type']
    case filter['type']
    when SPH_FILTER_VALUES
      request.put_int_array filter['values']
    when SPH_FILTER_RANGE
      request.put_int filter['min'], filter['max']
    when SPH_FILTER_FLOATRANGE
      request.put_float filter['min'], filter['max']
    else
      raise SphinxInternalError, 'Internal error: unhandled filter type'
    end
    request.put_int filter['exclude'] ? 1 : 0
  end
  # group-by clause, max-matches count, group-sort clause, cutoff count
  request.put_int @groupfunc
  request.put_string @groupby
  request.put_int @maxmatches
  request.put_string @groupsort
  request.put_int @cutoff, @retrycount, @retrydelay
  request.put_string @groupdistinct
  # anchor point
  if @anchor.empty?
    request.put_int 0
  else
    request.put_int 1
    request.put_string @anchor['attrlat'], @anchor['attrlong']
    request.put_float @anchor['lat'], @anchor['long']
  end
  # per-index weights
  request.put_int @indexweights.length
  @indexweights.each do |idx, weight|
    request.put_string idx
    request.put_int weight
  end
  # max query time
  request.put_int @maxquerytime
  # per-field weights
  request.put_int @fieldweights.length
  @fieldweights.each do |field, weight|
    request.put_string field
    request.put_int weight
  end
  request.put_string comment
  # store request to requests array
  @reqs << request.to_s
  return @reqs.length - 1
end
# Run queries batch.
#
# Returns an array of result sets on success.
# Returns false on network IO failure.
#
# Each result set in returned array is a hash which contains
# the same keys as the hash returned by <tt>Query</tt>, plus:
#
# * <tt>'error'</tt> -- search error for this query
# * <tt>'words'</tt> -- hash which maps query terms (stemmed!) to ( "docs", "hits" ) hash
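#
# Example of checking per-query errors after a batch:
#   results = sphinx.RunQueries
#   results.each { |r| warn r['error'] unless r['error'].empty? } if results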
def RunQueries
  if @reqs.empty?
    @error = 'No queries defined, issue AddQuery() first'
    return false
  end
  req = @reqs.join('')
  nreqs = @reqs.length
  @reqs = []
  response = PerformRequest(:search, req, nreqs)
  # parse response
  begin
    results = []
    ires = 0
    while ires < nreqs
      ires += 1
      result = {}
      result['error'] = ''
      result['warning'] = ''
      # extract status
      status = result['status'] = response.get_int
      if status != SEARCHD_OK
        message = response.get_string
        if status == SEARCHD_WARNING
          result['warning'] = message
        else
          result['error'] = message
          results << result
          next
        end
      end
      # read schema
      fields = []
      attrs = {}
      attrs_names_in_order = []
      nfields = response.get_int
      while nfields > 0
        nfields -= 1
        fields << response.get_string
      end
      result['fields'] = fields
      nattrs = response.get_int
      while nattrs > 0
        nattrs -= 1
        attr = response.get_string
        type = response.get_int
        attrs[attr] = type
        attrs_names_in_order << attr
      end
      result['attrs'] = attrs
      # read match count
      count = response.get_int
      id64 = response.get_int